From e205975ac5407a7166a28969895402f1f8761e9c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 5 Apr 2018 14:19:43 +0200 Subject: [PATCH 0001/1255] owl to rules example - draft --- vlog4j-examples/pom.xml | 57 +- ...6-9def-819c180bbe7e_ecomparison.n3.owl.xml | 297 ++++++++++ ...2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml | 558 ++++++++++++++++++ .../owlapi/RestrictedChaseOnOwlOntology.java | 40 ++ 4 files changed, 926 insertions(+), 26 deletions(-) create mode 100644 vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml create mode 100644 vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index a589d8640..3c76a6fdf 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -1,26 +1,31 @@ - - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.0.1-SNAPSHOT - - - vlog4j-examples - jar - - VLog4j Examples - Contains examples and usage instructions describing the basic functionality of VLog4j - - - - org.semanticweb.vlog4j - vlog4j-core - ${project.version} - - - + + + + 4.0.0 + + + org.semanticweb.vlog4j + vlog4j-parent + 0.0.1-SNAPSHOT + + + vlog4j-examples + jar + + VLog4j Examples + Contains examples and usage instructions describing the basic functionality of VLog4j + + + + org.semanticweb.vlog4j + vlog4j-core + ${project.version} + + + org.semanticweb.vlog4j + vlog4j-owlapi + ${project.version} + + + diff --git a/vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml b/vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml new file mode 100644 index 000000000..86bb928e2 --- /dev/null +++ 
b/vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml @@ -0,0 +1,297 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + 1 + + + + + + + + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml b/vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml new file mode 100644 index 000000000..7e90aaee0 --- /dev/null +++ b/vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml @@ -0,0 +1,558 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + 1 + + + + + + + 1 + + + + + + + + + + + + + + 1 + + + + + + + 1 + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java new file mode 100644 index 000000000..16905429f --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java @@ -0,0 +1,40 @@ +package org.semanticweb.vlog4j.examples.owlapi; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; + +public class RestrictedChaseOnOwlOntology { + + public static void main(String[] args) throws OWLOntologyCreationException, ReasonerStateException, + EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); + OWLOntology ontology = ontologyManager.loadOntologyFromOntologyDocument( + new File("src\\main\\data\\owl\\4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml")); + + OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); + owlToRulesConverter.addOntology(ontology); + + try (Reasoner reasoner = Reasoner.getInstance()) { + reasoner.addRules(new 
ArrayList(owlToRulesConverter.getRules())); + reasoner.addFacts(owlToRulesConverter.getFacts()); + reasoner.load(); + } + + // TODO this might fail because of EDB/IDB + // TODO query, reason + + } + +} From 5ea6a6a72090f46f375a22c997727377e3584bba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Fri, 13 Apr 2018 22:10:26 +0200 Subject: [PATCH 0002/1255] update usage instructions --- README.md | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 604e36a4a..8f655f378 100644 --- a/README.md +++ b/README.md @@ -8,12 +8,27 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -To build vlog4j from source, you need to install Maven and perform the following steps: +The current release of VLog4j is version 0.1.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: -* In the directory ```./vlog-core/lib``` copy the jar to ```jvlog-local.jar``` (the current default is a Linux library there) -* Run ```mvn initialize``` -* Run ```mvn install``` -* If this fails, you can run the script build-vlog-library.sh to compile and install this jar from the latest online sources using your local compiler +``` + + org.semanticweb.vlog4j + vlog4j-core + 0.1.0 + +``` + +You need to use Java 1.8 or above. Available modules include: + +* **vlog4j-core**: essential data models for rules and facts, and essential reasoner functionality +* **vlog4j-rdf**: support for reading from RDF files +* **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API + +The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. 
In case of problems, own binaries can be compiled as follows: + +* Run [build-vlog-library.sh](https://github.com/mkroetzsch/vlog4j/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system and copy it to ```./vlog4j-core/lib/jvlog-local.jar``` +* Run ```mvn -Pdevelopment initialize```. This will install the newly compiled jar file in place of the officially distributed vlog4j-base library. +* Run ```mvn install``` to test if the setup works Documentation ------------- From baffb5d07c68e56f8b78ffaa20aa8e532d1cc4d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Fri, 13 Apr 2018 22:13:06 +0200 Subject: [PATCH 0003/1255] mention examples --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 8f655f378..407b0d3ca 100644 --- a/README.md +++ b/README.md @@ -33,4 +33,5 @@ The released packages use vlog4j-base, which packages system-dependent binaries Documentation ------------- +* The module **vlog4j-examples** includes short example programs that demonstrate some common use cases * [JavaDoc](https://mkroetzsch.github.io/vlog4j/) is available online and through the Maven packages. 
From a2df31d9f3f4db765839cdfdea02d2a18d8b414b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 13 Apr 2018 22:13:51 +0200 Subject: [PATCH 0004/1255] change version to 0.2.0-SNAPSHOT --- pom.xml | 2 +- vlog4j-core/pom.xml | 2 +- vlog4j-examples/pom.xml | 2 +- vlog4j-owlapi/pom.xml | 2 +- vlog4j-rdf/pom.xml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index 79d6b321d..1d8268a39 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.0.1 + 0.2.0-SNAPSHOT pom VLog4j diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index 30b7edd7c..3977bea90 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.0.1 + 0.2.0-SNAPSHOT vlog4j-core diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 2c4f219fa..7ba9b0bb7 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.0.1 + 0.2.0-SNAPSHOT vlog4j-examples diff --git a/vlog4j-owlapi/pom.xml b/vlog4j-owlapi/pom.xml index 318cf8c96..cf7c2d473 100644 --- a/vlog4j-owlapi/pom.xml +++ b/vlog4j-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.0.1 + 0.2.0-SNAPSHOT vlog4j-owlapi diff --git a/vlog4j-rdf/pom.xml b/vlog4j-rdf/pom.xml index 3aae49455..f10429ad6 100644 --- a/vlog4j-rdf/pom.xml +++ b/vlog4j-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.0.1 + 0.2.0-SNAPSHOT vlog4j-rdf From 46eca8145229cf141fbec2975814af20c5b8eddb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Fri, 13 Apr 2018 22:17:59 +0200 Subject: [PATCH 0005/1255] clarification --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index 407b0d3ca..1c91fe56a 100644 --- a/README.md +++ b/README.md @@ -26,8 +26,7 @@ You need to use Java 1.8 or above. 
Available modules include: The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, own binaries can be compiled as follows: -* Run [build-vlog-library.sh](https://github.com/mkroetzsch/vlog4j/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system and copy it to ```./vlog4j-core/lib/jvlog-local.jar``` -* Run ```mvn -Pdevelopment initialize```. This will install the newly compiled jar file in place of the officially distributed vlog4j-base library. +* Run [build-vlog-library.sh](https://github.com/mkroetzsch/vlog4j/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./vlog4j-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog4j-base. 
* Run ```mvn install``` to test if the setup works Documentation From 4c24c220d39842435c86d9eb4768e0e4250bea5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Fri, 13 Apr 2018 22:22:16 +0200 Subject: [PATCH 0006/1255] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1c91fe56a..ba5d6e1c3 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -VLog4J +VLog4j ====== [![Build Status](https://travis-ci.org/mkroetzsch/vlog4j.png?branch=master)](https://travis-ci.org/mkroetzsch/vlog4j) [![Coverage Status](https://coveralls.io/repos/github/mkroetzsch/vlog4j/badge.svg?branch=master)](https://coveralls.io/github/mkroetzsch/vlog4j?branch=master) From 5e6671032d6b6b4c8486c2bcc85ffd9fd31eb68b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Fri, 13 Apr 2018 23:43:55 +0200 Subject: [PATCH 0007/1255] add Maven Central badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ba5d6e1c3..4808d64fb 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ VLog4j ====== [![Build Status](https://travis-ci.org/mkroetzsch/vlog4j.png?branch=master)](https://travis-ci.org/mkroetzsch/vlog4j) [![Coverage Status](https://coveralls.io/repos/github/mkroetzsch/vlog4j/badge.svg?branch=master)](https://coveralls.io/github/mkroetzsch/vlog4j?branch=master) +[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.vlog4j/vlog4j-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.vlog4j%22) A Java library based on the [VLog rule engine](https://github.com/karmaresearch/vlog) From 8144f4331e9a37119896176325ac8055d8e28c01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Mon, 16 Apr 2018 13:53:26 +0200 Subject: [PATCH 0008/1255] Revert to building jars locally on travis This fixes the build issues that occur with the standard Maven distributed binaries on 
this old Linux box. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f89be3cb2..68930cbba 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ before_install: - sudo apt-get install gcc-5 -y # - eval “CC=gcc-5 && CXX=g++-5” ## Uncomment line below to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar - # - sh ./build-vlog-library.sh + - sh ./build-vlog-library.sh after_success: - mvn clean cobertura:cobertura coveralls:cobertura From 87292536a809e86424a6644bc17de4fd680a6346 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Mon, 16 Apr 2018 14:48:47 +0200 Subject: [PATCH 0009/1255] Try older version of coveralls --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 1d8268a39..d58af1530 100644 --- a/pom.xml +++ b/pom.xml @@ -215,7 +215,7 @@ org.eluder.coveralls coveralls-maven-plugin - 4.3.0 + 2.1.0 From 86ee7516125998845dcdaedb8b58e0ad204f2d27 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 19 Apr 2018 20:40:17 +0200 Subject: [PATCH 0010/1255] correct SPARQL query wikidata prefixes from p: to wdt: to obtain "father" and "mother" binary relations --- .../implementation/LoadDataFromSparqlQueryTest.java | 12 ++++++------ .../SparqlQueryResultDataSourceTest.java | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java index d729bdc55..1940bda06 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java @@ -58,7 +58,7 @@ public void testSimpleSparqlQuery() 
Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // a has father b - "?a p:P22 ?b"); + "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); try (final Reasoner reasoner = Reasoner.getInstance()) { @@ -83,7 +83,7 @@ public void testSimpleSparqlQueryHttps() Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // a has father b - "?a p:P22 ?b"); + "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); try (final Reasoner reasoner = Reasoner.getInstance()) { @@ -116,7 +116,7 @@ public void testSimpleSparqlQuery2() Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // a has father b - "?a p:P22 ?b ."); + "?a wdt:P22 ?b ."); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); try (final Reasoner reasoner = Reasoner.getInstance()) { @@ -139,7 +139,7 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // b has father a and b has mother c - "?b p:P22 ?a .\n" + "?b p:P25 ?c"); + "?b wdt:P22 ?a .\n" + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); try (final Reasoner reasoner = Reasoner.getInstance()) { @@ -159,7 +159,7 @@ public void testConjunctiveQuery() Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, 
queryVariables, // b has father a and b has mother c - "?b p:P22 ?a ." + "?b p:P25 ?c"); + "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); try (final Reasoner reasoner = Reasoner.getInstance()) { @@ -182,7 +182,7 @@ public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // b has father a and b has mother c - "?b p:P22 ?a ." + "?b p:P25 ?c"); + "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); try (final Reasoner reasoner = Reasoner.getInstance()) { // TODO must validate predicate arity sonner diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index ce44498b4..7fd532f24 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -44,11 +44,11 @@ public void testToStringSimpleSparqlQueryResultDataSource() throws MalformedURLE final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, - "?a p:P22 ?b"); + "?a wdt:P22 ?b"); final String configString = dataSource.toConfigString(); final String expectedStringConfig = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n" + "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" - + "EDB%1$d_param2=?a p:P22 ?b\n"; + + "EDB%1$d_param2=?a wdt:P22 ?b\n"; assertEquals(expectedStringConfig, configString); } @@ -60,7 +60,7 @@ public void testUniqueVariableNamesQuery() 
Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("b"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, - "?a p:P22 ?b"); + "?a wdt:P22 ?b"); assertEquals(1, dataSource.getQueryVariables().size()); } From 9e2e7f6f9eee9d9eaf9d3e59591da0947223208f Mon Sep 17 00:00:00 2001 From: David Carral Date: Fri, 20 Apr 2018 11:15:10 +0200 Subject: [PATCH 0011/1255] Added Demo1 and Demo2 --- vlog4j-examples/pom.xml | 5 +++ .../owlapi/RestrictedChaseOnOwlOntology.java | 38 +++++++++++++++---- 2 files changed, 36 insertions(+), 7 deletions(-) diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 8021cd2e7..8482fb784 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -22,6 +22,11 @@ vlog4j-core ${project.version} + + ${project.groupId} + vlog4j-owlapi + ${project.version} + diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java index 16905429f..340746c35 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java @@ -3,16 +3,22 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; +import java.util.Set; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.vlog4j.core.model.api.Atom; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import 
org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; public class RestrictedChaseOnOwlOntology { @@ -20,21 +26,39 @@ public class RestrictedChaseOnOwlOntology { public static void main(String[] args) throws OWLOntologyCreationException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); - OWLOntology ontology = ontologyManager.loadOntologyFromOntologyDocument( - new File("src\\main\\data\\owl\\4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml")); + OWLOntology ontology = ontologyManager.loadOntologyFromOntologyDocument(new File("src" + File.separator + "main" + + File.separator + "data" + File.separator + "owl" + File.separator + "bike.owl")); OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); owlToRulesConverter.addOntology(ontology); + // Print out rules + Set rules = owlToRulesConverter.getRules(); + for (Rule rule : rules) + System.out.println(rule); + System.out.println(); + + // Print out facts + Set facts = owlToRulesConverter.getFacts(); + for (Atom fact : facts) + System.out.println(fact); + System.out.println(); + try (Reasoner reasoner = Reasoner.getInstance()) { + // Load and reason reasoner.addRules(new ArrayList(owlToRulesConverter.getRules())); reasoner.addFacts(owlToRulesConverter.getFacts()); reasoner.load(); - } - - // TODO this might fail because of EDB/IDB - // TODO query, reason + reasoner.reason(); + // Print out Query Answers + Predicate pred = Expressions.makePredicate("http://www.bike.org#isPartOf", 2); + Variable 
vx = Expressions.makeVariable("x"); + Variable vy = Expressions.makeVariable("y"); + QueryResultIterator answers = reasoner.answerQuery(Expressions.makeAtom(pred, vx, vy), true); + while (answers.hasNext()) { + System.out.println(answers.next()); + } + } } - } From 684a997d4502ecb97f6078de812ef5f07fe4edcf Mon Sep 17 00:00:00 2001 From: David Carral Date: Fri, 20 Apr 2018 11:16:04 +0200 Subject: [PATCH 0012/1255] Added Demo1.java and Demo2.java. --- vlog4j-examples/src/main/data/owl/bike.owl | 84 +++++++++++++++++++ .../vlog4j/examples/owlapi/Demo1.java | 7 ++ .../vlog4j/examples/owlapi/Demo2.java | 5 ++ 3 files changed, 96 insertions(+) create mode 100644 vlog4j-examples/src/main/data/owl/bike.owl create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java diff --git a/vlog4j-examples/src/main/data/owl/bike.owl b/vlog4j-examples/src/main/data/owl/bike.owl new file mode 100644 index 000000000..a94284aed --- /dev/null +++ b/vlog4j-examples/src/main/data/owl/bike.owl @@ -0,0 +1,84 @@ +@prefix : . +@prefix owl: . +@prefix rdf: . +@prefix xml: . +@prefix xsd: . +@prefix rdfs: . +@base . + + rdf:type owl:Ontology . + +################################################################# +# Object Properties +################################################################# + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPart +:hasPart rdf:type owl:ObjectProperty ; + owl:inverseOf :isPartOf . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPartSource +:hasPartSource rdf:type owl:ObjectProperty ; + rdfs:subPropertyOf :hasPart . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOf +:isPartOf rdf:type owl:ObjectProperty . 
+ + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOfSource +:isPartOfSource rdf:type owl:ObjectProperty ; + rdfs:subPropertyOf :isPartOf . + + +################################################################# +# Classes +################################################################# + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Bike +:Bike rdf:type owl:Class ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onProperty :hasPart ; + owl:someValuesFrom :Wheel + ] . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#BikeSource +:BikeSource rdf:type owl:Class ; + rdfs:subClassOf :Bike . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Wheel +:Wheel rdf:type owl:Class ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onProperty :isPartOf ; + owl:someValuesFrom :Bike + ] . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#WheelSource +:WheelSource rdf:type owl:Class ; + rdfs:subClassOf :Wheel . + + +################################################################# +# Individuals +################################################################# + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b1 +:b1 rdf:type owl:NamedIndividual , + :BikeSource ; + :hasPartSource :w1 . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b2 +:b2 rdf:type owl:NamedIndividual , + :BikeSource . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#w1 +:w1 rdf:type owl:NamedIndividual , + :WheelSource . 
+ + +### Generated by the OWL API (version 4.2.8.20170104-2310) https://github.com/owlcs/owlapi diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java new file mode 100644 index 000000000..a08b675f3 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java @@ -0,0 +1,7 @@ +package org.semanticweb.vlog4j.examples.owlapi; + +public class Demo1 { + public static void main(String[] args) { + + } +} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java new file mode 100644 index 000000000..9f140d9e4 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java @@ -0,0 +1,5 @@ +package org.semanticweb.vlog4j.examples.owlapi; + +public class Demo2 { + +} From 88b5282a216a52fe0925706a66c4cb0a8addddae Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 May 2018 17:16:30 +0200 Subject: [PATCH 0013/1255] remove unused classes --- .../java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java | 7 ------- .../java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java | 5 ----- .../examples/owlapi/RestrictedChaseOnOwlOntology.java | 3 +-- 3 files changed, 1 insertion(+), 14 deletions(-) delete mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java delete mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java deleted file mode 100644 index a08b675f3..000000000 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo1.java +++ /dev/null @@ -1,7 +0,0 @@ -package 
org.semanticweb.vlog4j.examples.owlapi; - -public class Demo1 { - public static void main(String[] args) { - - } -} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java deleted file mode 100644 index 9f140d9e4..000000000 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/Demo2.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.semanticweb.vlog4j.examples.owlapi; - -public class Demo2 { - -} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java index 340746c35..f834540a9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java @@ -26,8 +26,7 @@ public class RestrictedChaseOnOwlOntology { public static void main(String[] args) throws OWLOntologyCreationException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); - OWLOntology ontology = ontologyManager.loadOntologyFromOntologyDocument(new File("src" + File.separator + "main" - + File.separator + "data" + File.separator + "owl" + File.separator + "bike.owl")); + OWLOntology ontology = ontologyManager.loadOntologyFromOntologyDocument(new File("src/main/data/owl/bike.owl")); OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); owlToRulesConverter.addOntology(ontology); From 066b582f1c4b00139c8ec05ef58b1a757bf9c84a Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 May 2018 21:20:33 +0200 Subject: [PATCH 0014/1255] Example for adding data from SPARQL query results on remote database endpoints. 
--- .../AddDataFromSparqlQueryResults.java | 209 ++++++++++++++++++ .../owlapi/RestrictedChaseOnOwlOntology.java | 20 ++ 2 files changed, 229 insertions(+) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java new file mode 100644 index 000000000..ea1072f49 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java @@ -0,0 +1,209 @@ +package org.semanticweb.vlog4j.examples; + +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.net.URL; +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.Atom; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.DataSource; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; + +/** + * This is a simple example of adding data from the result of a SPARQL query on + * a remote database endpoint. In this example, we will query WikiData for + * titles of publications that have authors who have children together. + * + * @author Irina Dragoste + * + */ +public class AddDataFromSparqlQueryResults { + + /** + * WikiData author + * property id. + */ + private static final String WIKIDATA_AUTHOR_PROPERTY = "wdt:P50"; + /** + * WikiData title + * property id. Published title of a work, such as a newspaper article, a + * literary work, a website, or a performance work + */ + private static final String WIKIDATA_TITLE_PROPERTY = "wdt:P1476"; + /** + * WikiData mother + * property id. + */ + private static final String WIKIDATA_MOTHER_PROPERTY = "wdt:P25"; + /** + * WikiData father + * property id. 
+ */ + private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22"; + + public static void main(String[] args) + throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + + /* + * The WikiData SPARQL query endpoint. + */ + final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); + + /* + * SPARQL query body that looks for publications where two authors of the + * publication are the mother, respectively father of the same child. + */ + final String queryBody = " ?publication " + WIKIDATA_TITLE_PROPERTY + " ?title ." + "?publication " + + WIKIDATA_AUTHOR_PROPERTY + " ?mother ." + " ?publication " + WIKIDATA_AUTHOR_PROPERTY + " ?father ." + + " ?child " + WIKIDATA_MOTHER_PROPERTY + " ?mother ." + " ?child " + WIKIDATA_FATHER_PROPERTY + + " ?father ."; + + final Variable titleVariable = Expressions.makeVariable("title"); + final Variable motherVariable = Expressions.makeVariable("mother"); + final Variable fatherVariable = Expressions.makeVariable("father"); + + /* + * The query variables are the variables from the query body which will appear + * in the query result, in the given order. Fact resulting from this query will + * have as terms the title of the publication, the mother publication author and + * the father publication author. + */ + final LinkedHashSet queryVariables = new LinkedHashSet<>( + Arrays.asList(titleVariable, motherVariable, fatherVariable)); + + /* + * We query WikiData with the SPARQL query composed of the query variables and + * query body. The query result is a DataSource we will associate to a + * predicate. + */ + final DataSource sparqlQueryResultDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + queryVariables, queryBody); + + /* + * Predicate that will be mapped to the SPARQL query result. It must have the + * same arity as the query variables size. In this case, we have 3 query + * variables (title, mother and father). 
+ */ + final Predicate titleOfPublicationThatHasAuthorsWhoParentTheSameChild = Expressions + .makePredicate("havePublicationsTogether", 3); + + try (Reasoner reasoner = Reasoner.getInstance()) { + + /* + * The SPARQL query results will be added to the reasoner knowledge base, as + * facts associated to the predicate + * titleOfPublicationThatHasAuthorsWhoParentTheSameChild. + */ + reasoner.addFactsFromDataSource(titleOfPublicationThatHasAuthorsWhoParentTheSameChild, + sparqlQueryResultDataSource); + + reasoner.load(); + + /* + * We construct a query atom for the predicated associated to the SPARQL query + * result. + */ + Atom queryAtom = Expressions.makeAtom(titleOfPublicationThatHasAuthorsWhoParentTheSameChild, + Expressions.makeVariable("x"), Expressions.makeVariable("y"), Expressions.makeVariable("z")); + + /* We query the reasoner for facts of the SPARQL query result predicate. */ + System.out.println("Publications that have authors who parent the same child:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + List queryResultTerms = queryResult.getTerms(); + + System.out.println("- title: " + queryResultTerms.get(0) + ", mother author: " + + queryResultTerms.get(1) + ", father author: " + queryResultTerms.get(2)); + }); + } + + Atom haveChildrenTogether = Expressions.makeAtom("haveChildrenTogether", Expressions.makeVariable("y"), + Expressions.makeVariable("z")); + Atom isMother = Expressions.makeAtom("isMother", Expressions.makeVariable("y")); + Atom isFather = Expressions.makeAtom("isFather", Expressions.makeVariable("z")); + Conjunction ruleHeadConjunction = Expressions.makeConjunction(haveChildrenTogether, isMother, isFather); + /* + * haveChildrenTogetherRuleHeadAtom(y,z), isMother(y), isFather(z) :- + * titleOfPublicationThatHasAuthorsWhoParentTheSameChild(x,y,z) + */ + Rule rule = Expressions.makeRule(ruleHeadConjunction, 
Expressions.makeConjunction(queryAtom)); + + /* + * We reset the reasoner in order to add the created rule, and reason on the + * data added from the WikiData SPARQL query result. + */ + reasoner.resetReasoner(); + reasoner.addRules(rule); + reasoner.load(); + reasoner.reason(); + + /* We query the reasoner for facts of the haveChildrenTogether predicate. */ + System.out.println("Pairs of authors who have children together and wrote publications together:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(haveChildrenTogether, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + List queryResultTerms = queryResult.getTerms(); + + System.out + .println("- author1: " + queryResultTerms.get(0) + ", author2: " + queryResultTerms.get(1)); + }); + } + + /* We query the reasoner for facts of the isMother predicate. */ + System.out.println("Mothers:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isMother, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + List queryResultTerms = queryResult.getTerms(); + + System.out + .println("- mother: " + queryResultTerms.get(0)); + }); + } + + /* We query the reasoner for facts of the isFather predicate. 
*/ + System.out.println("Fathers:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isFather, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + List queryResultTerms = queryResult.getTerms(); + + System.out + .println("- father: " + queryResultTerms.get(0)); + }); + } + + + } + } + +} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java index f834540a9..ae913f816 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples.owlapi; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.File; import java.io.IOException; import java.util.ArrayList; From 007f7e335a6de7219996a3fad877b4f2d02a59dc Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 May 2018 23:10:00 +0200 Subject: [PATCH 0015/1255] Documented example of transforming an OWL ontology into rules and facts. 
--- ...6-9def-819c180bbe7e_ecomparison.n3.owl.xml | 297 ---------- ...2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml | 558 ------------------ .../owlapi/RestrictedChaseOnOwlOntology.java | 58 +- 3 files changed, 44 insertions(+), 869 deletions(-) delete mode 100644 vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml delete mode 100644 vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml diff --git a/vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml b/vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml deleted file mode 100644 index 86bb928e2..000000000 --- a/vlog4j-examples/src/main/data/owl/0000005-SWRLRules-56fd8bdd-2be0-45b6-9def-819c180bbe7e_ecomparison.n3.owl.xml +++ /dev/null @@ -1,297 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1 - - - - - - - 1 - - - - - - - - - - - - - - 1 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml b/vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml deleted file mode 100644 index 7e90aaee0..000000000 --- a/vlog4j-examples/src/main/data/owl/4ecd5765-8ee2-4f18-a7e7-4daf64288dc4_Plant.owl.owl.xml +++ /dev/null @@ -1,558 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1 - - - - - - - 1 - - - - - - - 1 - - - - - - - - - - - - - - 1 - - - - - - - 1 - - - - - - - 1 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java index ae913f816..a60893639 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java @@ -30,7 +30,7 @@ import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; @@ -41,42 +41,72 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import 
org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; +/** + * This example shows how an OWL ontology can be transformed into {@link Rule}s + * and {@link Fact}s using vlog4j-owlapi dependency libraries. + * + * @author Irina Dragoste + * + */ public class RestrictedChaseOnOwlOntology { public static void main(String[] args) throws OWLOntologyCreationException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + + /* Bike ontology is loaded from a Bike file using OWL API */ OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = ontologyManager.loadOntologyFromOntologyDocument(new File("src/main/data/owl/bike.owl")); + /* + * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in + * source ontology to target Rule and Fact objects + */ OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); owlToRulesConverter.addOntology(ontology); - // Print out rules + /* Print out the Rules extracted from bike ontology. 
*/ + System.out.println("Rules extracted from Bike ontology:"); Set rules = owlToRulesConverter.getRules(); - for (Rule rule : rules) - System.out.println(rule); + for (Rule rule : rules) { + System.out.println(" - rule: " + rule); + } System.out.println(); - // Print out facts + /* Print out Facts extracted from bike ontology */ + System.out.println("Facts extracted from Bike ontology:"); Set facts = owlToRulesConverter.getFacts(); - for (Atom fact : facts) - System.out.println(fact); + for (Atom fact : facts) { + System.out.println(" - fact: " + fact); + } System.out.println(); try (Reasoner reasoner = Reasoner.getInstance()) { - // Load and reason + /* Load rules and facts obtained from the ontology */ reasoner.addRules(new ArrayList(owlToRulesConverter.getRules())); reasoner.addFacts(owlToRulesConverter.getFacts()); reasoner.load(); + /* Reason over loaded ontology */ reasoner.reason(); - // Print out Query Answers - Predicate pred = Expressions.makePredicate("http://www.bike.org#isPartOf", 2); + /* Query for the parts of bike constant "b2". */ Variable vx = Expressions.makeVariable("x"); - Variable vy = Expressions.makeVariable("y"); - QueryResultIterator answers = reasoner.answerQuery(Expressions.makeAtom(pred, vx, vy), true); - while (answers.hasNext()) { - System.out.println(answers.next()); + Constant b2 = Expressions.makeConstant("http://www.bike.org#b2"); + Atom isPartOfPairs = Expressions.makeAtom("http://www.bike.org#isPartOf", vx, b2); + + /* + * See that an unnamed individual has been introduced to satisfy + * owl:someValuesFrom restriction: + * + * http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Bike + * :Bike rdf:type owl:Class ; + * rdfs:subClassOf [ rdf:type owl:Restriction ; + * owl:onProperty :hasPart ; + * owl:someValuesFrom :Wheel + * ] . 
+ */ + try (QueryResultIterator answers = reasoner.answerQuery(isPartOfPairs, true);) { + answers.forEachRemaining(answer -> System.out + .println(answer.getTerms().get(0) + " isPartOf " + answer.getTerms().get(1))); } } } From 3d3a414315042204e7dfca5033a33c2a1dfdfb39 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 May 2018 16:36:39 +0200 Subject: [PATCH 0016/1255] added example for converting data from an RDF resource into facts. --- vlog4j-examples/LICENSE.txt | 402 +- vlog4j-examples/README.md | 5 + vlog4j-examples/pom.xml | 87 +- .../rdf/iswc-2016-complete-alignments.rdf | 47752 ++++++++++++++++ .../SkolemChaseExecutionFromToFile.java | 2 + .../owlapi/RestrictedChaseOnOwlOntology.java | 4 +- .../examples/rdf/AddDataFromRDFModel.java | 213 + 7 files changed, 48231 insertions(+), 234 deletions(-) create mode 100644 vlog4j-examples/README.md create mode 100644 vlog4j-examples/src/main/data/rdf/iswc-2016-complete-alignments.rdf create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java diff --git a/vlog4j-examples/LICENSE.txt b/vlog4j-examples/LICENSE.txt index 261eeb9e9..29f81d812 100644 --- a/vlog4j-examples/LICENSE.txt +++ b/vlog4j-examples/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-examples/README.md b/vlog4j-examples/README.md new file mode 100644 index 000000000..789743204 --- /dev/null +++ b/vlog4j-examples/README.md @@ -0,0 +1,5 @@ +This project contains examples of different use-cases of **vlog4j** functionality. +- reasoning with th default Restricted Chase algorithm : RestrictedChaseExecutionInMemory.java +- reasoning with Skolem Chase algorithm : SkolemChaseExecutionFromToFile.java +- converting an OWL ontology into rules and facts: owlapi.RestrictedChaseOnOwlOntology.java +- converting an RDF resource into facts: rdf.AddDataFromRDFModel.java \ No newline at end of file diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 8482fb784..9568a950d 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -1,32 +1,55 @@ - - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.2.0-SNAPSHOT - - - vlog4j-examples - pom - - VLog4j Examples - Contains examples and usage instructions describing the basic functionality of VLog4j - - - - ${project.groupId} - vlog4j-core - ${project.version} - - - ${project.groupId} - vlog4j-owlapi - ${project.version} - - - - + + + + 4.0.0 + + + org.semanticweb.vlog4j + vlog4j-parent + 0.2.0-SNAPSHOT + + + vlog4j-examples + pom + + VLog4j Examples + Contains examples and usage instructions describing the basic functionality of VLog4j + + + + ${project.groupId} + vlog4j-core + 
${project.version} + + + ${project.groupId} + vlog4j-owlapi + ${project.version} + + + ${project.groupId} + vlog4j-rdf + ${project.version} + + + + + org.openrdf.sesame + sesame-rio-turtle + + 2.7.16 + + + + + org.openrdf.sesame + sesame-rio-rdfxml + + 2.7.16 + + + + + + diff --git a/vlog4j-examples/src/main/data/rdf/iswc-2016-complete-alignments.rdf b/vlog4j-examples/src/main/data/rdf/iswc-2016-complete-alignments.rdf new file mode 100644 index 000000000..2c198b73a --- /dev/null +++ b/vlog4j-examples/src/main/data/rdf/iswc-2016-complete-alignments.rdf @@ -0,0 +1,47752 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + d5cdedb99ce0566bc81fbb85b3d590c8f2ccb819 + + Floriano Scioscia + + + + + + Floriano Scioscia + + Floriano Scioscia + + + + + + + + + + + + + + + + + + + + + + + + + Elena Simperl + + + + + + + + + + + Elena Simperl + + Elena Simperl + + + The University of Adelaide + + + The University of Adelaide + + + + The University of Adelaide + + + + + + + + + + + + + + + + + + + + + + + + + + + + user interfaces + + + large high-resolution displays + + + + ontology alignment + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + user interfaces + + + Producing alignments of highest quality requires ‘humans in the loop’, however, user involvement is currently one of the challenges for the ontology alignment community. Ontology alignment is a cognitively intensive task and could be efficiently supported by user interfaces encompassing well-designed visualizations and interaction techniques. This work investigates the application of large, high-resolution displays to improve users’ cognitive support and identifies several promising directions for their application—improving ontologies’ and alignments’ navigation, supporting users’ thinking process and collaboration. 
+ Producing alignments of highest quality requires ‘humans in the loop’, however, user involvement is currently one of the challenges for the ontology alignment community. Ontology alignment is a cognitively intensive task and could be efficiently supported by user interfaces encompassing well-designed visualizations and interaction techniques. This work investigates the application of large, high-resolution displays to improve users’ cognitive support and identifies several promising directions for their application—improving ontologies’ and alignments’ navigation, supporting users’ thinking process and collaboration. + large high-resolution displays + + + + + + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + ontology alignment + + + ecf25732b95195eb27e8003f30499cd78dd609bf + + + Eamonn Clinton + + + Eamonn Clinton + + + + Eamonn Clinton + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Freddy Brasileiro + cb4a2d442357edba0b13d7e29b0c1ac976ea1ca5 + Freddy Brasileiro + Freddy Brasileiro + + + + + + + + + + + Property Paths + Query containment is one of the building block of query optimization techniques. In the relational world, query containment is a well-studied problem. At the same time it is well-understood that relational queries are not enough to cope with graph-structured data, where one is interested in expressing queries that capture navigation in the graph. This paper contributes a study on the problem of query containment for an expressive class of navigational queries called Extended Property Paths (EPPs). EPPs are more expressive than previous navigational extensions of SPARQL like property paths and nested regular expressions, for which containment has already been studied. We attack the problem of EPPs (and SPARQL with EPPs) containment and provide complexity bounds. 
+ + + Graph Navigational Languages + + Graph Navigational Languages + Property Paths + Containment of Expressive SPARQL Navigational Queries + + Query Containment + Containment of Expressive SPARQL Navigational Queries + Query Containment + + + + Containment of Expressive SPARQL Navigational Queries + + + + + + + + + + Query containment is one of the building block of query optimization techniques. In the relational world, query containment is a well-studied problem. At the same time it is well-understood that relational queries are not enough to cope with graph-structured data, where one is interested in expressing queries that capture navigation in the graph. This paper contributes a study on the problem of query containment for an expressive class of navigational queries called Extended Property Paths (EPPs). EPPs are more expressive than previous navigational extensions of SPARQL like property paths and nested regular expressions, for which containment has already been studied. We attack the problem of EPPs (and SPARQL with EPPs) containment and provide complexity bounds. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T13:50:00 + 2016-10-20T13:50:00 + Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies + + 2016-10-20T13:30:00 + 2016-10-20T13:50:00 + 2016-10-20T13:30:00 + 2016-10-20T13:50:00 + Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies + Chetana Gavankar, Yuan-Fang Li and Ganesh Ramakrishnan + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Alessandra Mileo + Alessandra Mileo + + Alessandra Mileo + + 1447da5207e74c3cb4416cf4b23d7a722621c110 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 08214cd2b712c0f904f1e0e4188c7f9214abd64a + Menna Bakry + + + + + + + Menna Bakry + + + + Menna Bakry + + + + + + + + + + + + + + + + + + + + + + + + + Vidar Klungre + + + Vidar Klungre + + + Vidar Klungre + 9d90d4c92adf2083b473ca5109203c60d0291206 + + + + + + + ABB Corporate Research + ABB Corporate Research + + + + ABB Corporate Research + + + + + + + + + + c9da11def5690717ea77dfaae7d654e32f27aa6b + + Syed Muhammad Ali Hasnain + Syed Muhammad Ali Hasnain + + + + Syed Muhammad Ali Hasnain + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + e6af9124752769b83d7517d127d8e8c6b6faa440 + Andrea Mauri + + + Andrea Mauri + + Andrea Mauri + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Machine Learning + + Scalability + Refinement Operators + + + Refinement Operators + + + Planning + + + Linked Data + Link Discovery + Scalable Link Discovery for Modern Data-Driven Applications + + 
Linked Data + Partial Recall + Machine Learning + + Modern data-driven applications often have to integrate and process large volumes of high-velocity data. To this end, they require fast and accurate Link Discovery solutions. Most Link Discovery frameworks rely on complex link specifications to determine candidates for links. Hence, the main focus of this work lies in the conception, development, implementation and evaluation of time-efficient and scalable Link Discovery approaches based on the link specification paradigm. We address the aforementioned challenges by presenting approaches for (1) time-constrained linking and (2) for the efficient computation and (3) scalable execution of link specifications with applications to periodically updated knowledge bases. The overall result of this thesis will be an open-source framework for link discovery on large volumes of RDF data streams. + Partial Recall + + + Planning + Modern data-driven applications often have to integrate and process large volumes of high-velocity data. To this end, they require fast and accurate Link Discovery solutions. Most Link Discovery frameworks rely on complex link specifications to determine candidates for links. Hence, the main focus of this work lies in the conception, development, implementation and evaluation of time-efficient and scalable Link Discovery approaches based on the link specification paradigm. We address the aforementioned challenges by presenting approaches for (1) time-constrained linking and (2) for the efficient computation and (3) scalable execution of link specifications with applications to periodically updated knowledge bases. The overall result of this thesis will be an open-source framework for link discovery on large volumes of RDF data streams. 
+ + Scalability + Scalable Link Discovery for Modern Data-Driven Applications + Link Discovery + Scalable Link Discovery for Modern Data-Driven Applications + + + + + + + + + + + + + Motoyuki Takaai + + + + + + + + e79c193c77e024e98494681723e3a26df4b3e66a + + + + Motoyuki Takaai + Motoyuki Takaai + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Djellel Eddine Difallah + + + + + Djellel Eddine Difallah + + dad4e489a8bbf598b5133e3f6f129aeee75b8a23 + + + + + Djellel Eddine Difallah + + + + + + + + + + + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + + + AUFX-O: Novel Methods for the Representation of Audio Processing Workflows + AUFX-O: Novel Methods for the Representation of Audio Processing Workflows + 2016-10-21T14:30:00 + Thomas Wilmering, György Fazekas and Mark B. Sandler + + 2016-10-21T14:30:00 + + + + + + + + + + + + + + + György Fazekas + + György Fazekas + + 280f9450d37d0a714fe82df4809b7207d8a5daf2 + + + + György Fazekas + + + + + + + + + + + + + + + Paul Buitelaar + + + + + + + + Paul Buitelaar + Paul Buitelaar + + + + + 26abe4bbe6cae6339b5814a89a4cd6aa6786e0e4 + + + + + Natasha Noy + + + + + + + Natasha Noy + + + + Natasha Noy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Flash storage + Flash storage + RDF data processing + + + Linked Data processing for Embedded Devices + + Linked Data processing for Embedded Devices + + Our PhD work aims to a comprehensive, scalable and resourced-awareness +software framework to process RDF data for embedded devices. 
+In this proposal, we introduce a system architecture supporting RDF storage, +SPARQL query, RDF reasoning and continuous query for RDF stream. The ar- +chitecture is designed to be applicable to embedded systems. For the efficient +performance and scalability, we propose data management techniques adapt- +ing to hardware characteristics of embedded devices. Since computing resources +on embedded devices are constraint, their usage should be context dependent. +Therefore, we work on a resource adaptation model that supports trading off +system performance and device resources depending on their availability. The +adaptation model is based on the resource cost model of the data management +techniques. + + + + Linked Data processing for Embedded Devices + + + Embedded devices + + + RDF data processing + Our PhD work aims to a comprehensive, scalable and resourced-awareness +software framework to process RDF data for embedded devices. +In this proposal, we introduce a system architecture supporting RDF storage, +SPARQL query, RDF reasoning and continuous query for RDF stream. The ar- +chitecture is designed to be applicable to embedded systems. For the efficient +performance and scalability, we propose data management techniques adapt- +ing to hardware characteristics of embedded devices. Since computing resources +on embedded devices are constraint, their usage should be context dependent. +Therefore, we work on a resource adaptation model that supports trading off +system performance and device resources depending on their availability. The +adaptation model is based on the resource cost model of the data management +techniques. 
+ + + Embedded devices + + + + + + Raphaël Troncy + 76b8645ac23d412d99c23dd95e0fbbe092d3f730 + + + + + + + Raphaël Troncy + Raphaël Troncy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 4bdf3d0baced19863517c145b78ba6ebda5bc170 + + + + Florian Haag + + Florian Haag + Florian Haag + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + b68f577fe797be26e8b19e52415aebef09b080c8 + + + Minh Pham + Minh Pham + Minh Pham + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Sheffield Hallam University + + Sheffield Hallam University + + + + + + + Sheffield Hallam University + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pasquale Lisena + ef5bf1abcd71032d60cf5daa137efd555cd1f73b + + + Pasquale Lisena + + + + + + + + Pasquale Lisena + + + + + + + + + + Steven de Rooij + 090edbd034d896cce7902e1897fa648a67bdc9fe + + + Steven de Rooij + + Steven de Rooij + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + aa792ac2251df0d04c734b9db50c92c72e96eb77 + + + + + Makoto Urakawa + + + + Makoto Urakawa + Makoto Urakawa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Universidad de Talca + + + + + Universidad de Talca + Universidad de Talca + + + + + + Seiji Okajima + + Seiji Okajima + + + + Seiji Okajima + + + + 2e6a2d3efab08563a3213d847fb43eb67524798f + + + + + + + Hong Fang + Hong Fang + Hong Fang + + + + + + + + + + 2ecc4053698dcfe9ec434598ba358e8af5ba2341 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Quan Z. Sheng + + + + Quan Z. Sheng + + + + a5e8af6357bab05259ef6ea1f0f1700d19ccac9c + + Quan Z. 
Sheng + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Katalin Ternai + + + + + Katalin Ternai + Katalin Ternai + + + 76fc03dc8f50972d1cd8d8538e20ae5c0015838c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vania Dimitrova + + + 44593204b3df5c26b8db090e41972254e32fbcf7 + + Vania Dimitrova + + + + Vania Dimitrova + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tchap.me + + + + + Tchap.me + + + + + Tchap.me + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Bielefeld + University of Bielefeld + + + University of Bielefeld + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T11:00:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + 2016-10-19T11:00:00 + 2016-10-19T12:20:00 + Knowledge Graph + Knowledge Graph + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + acf6ff6e1d431ba72252ebf6ddf752b69ab7d661 + + + + + + Zhenyu Song + Zhenyu Song + Zhenyu Song + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T15:45:00 + 2016-10-18T16:00:00 + 2016-10-18T16:00:00 + 2016-10-18T16:00:00 + Coffee Break + 2016-10-18T15:45:00 + Coffee Break + 2016-10-18T16:00:00 + + + + + + + + + + + + + + + + + + + + + + + Dataset about iswc2016-alignments. 
+ Wed Oct 19 02:38:49 CEST 2016 + + + + + + + + + + + University of Zurich + + University of Zurich + + + + University of Zurich + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Gregoire Burel, Lara Piccolo and Harith Alani + 2016-10-19T21:00:00 + Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse + + Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + + Sarah Komla-Ebri + Sarah Komla-Ebri + + + + + + + + + + Sarah Komla-Ebri + + + + + Derek Magee + + + + + + Derek Magee + Derek Magee + + + + + 89c90a68bb231a1353a1718170419d174898621e + + + + + Univiersitat Politecnica de Catalunya + + + + + + + + + + Univiersitat Politecnica de Catalunya + Univiersitat Politecnica de Catalunya + + + + + + + + + + + + + + + Data stream applications are becoming increasingly popular on the web. In these applications, one query pattern is especially prominent: a join between a continuous data stream and some background data (BGD). Oftentimes, the target BGD is large, maintained externally, changing slowly, and costly to query (both in terms of time and money). Hence, practical applications usually maintain a local (cached) view of the relevant BGD. Given that these caches are not updated as part of the transaction modifying the original BGD, they should be maintained under realistic budget constraints (in terms of latency, computation time, and possibly financial cost) to avoid stale data leading to wrong answers. + +This paper proposes to model the join between streams and the BGD as a bipartite graph. By exploiting the graph structure, we keep the quality of results good enough without refreshing the entire cache for each evaluation. 
We also introduce two extensions to this method: first, we consider both the sliding window (specifying the currently relevant section of the data stream) and the change rate of the BGD to focus on updates that have the longest effect. Second, by considering the future impact of a query to the BGD we propose to sometimes delay updates to provide more fresher answers in future. + +Using an implemented system we empirically show that we can improve result freshness by 93% over baseline algorithms such as Random Selection or Least Recently Updated. + + + + Join + + Data stream applications are becoming increasingly popular on the web. In these applications, one query pattern is especially prominent: a join between a continuous data stream and some background data (BGD). Oftentimes, the target BGD is large, maintained externally, changing slowly, and costly to query (both in terms of time and money). Hence, practical applications usually maintain a local (cached) view of the relevant BGD. Given that these caches are not updated as part of the transaction modifying the original BGD, they should be maintained under realistic budget constraints (in terms of latency, computation time, and possibly financial cost) to avoid stale data leading to wrong answers. + +This paper proposes to model the join between streams and the BGD as a bipartite graph. By exploiting the graph structure, we keep the quality of results good enough without refreshing the entire cache for each evaluation. We also introduce two extensions to this method: first, we consider both the sliding window (specifying the currently relevant section of the data stream) and the change rate of the BGD to focus on updates that have the longest effect. Second, by considering the future impact of a query to the BGD we propose to sometimes delay updates to provide more fresher answers in future. 
+ +Using an implemented system we empirically show that we can improve result freshness by 93% over baseline algorithms such as Random Selection or Least Recently Updated. + + + Budget + + + Budget + Remote Data Access + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + Stream Processing + Remote Data Access + + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + Freshness + Join + Freshness + + Stream Processing + + + + + + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + + + + + + + + + Hassan Saif + + + cfd1b87509b48cac8cacda96266298dd3dee5e0b + + Hassan Saif + + + + + Hassan Saif + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + SPORTAL: Searching for Public SPARQL Endpoints + Syed Muhammad Ali Hasnain, Qaiser Mehmood, Syeda Sana E Zainab and Aidan Hogan + SPORTAL: Searching for Public SPARQL Endpoints + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + Télécom ParisTech University + Télécom ParisTech University + + + + Télécom ParisTech University + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + DCC, Pontificia Universidad Católica de Chile + DCC, Pontificia Universidad Católica de Chile + + + + + + + + + DCC, Pontificia Universidad Católica de Chile + + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + Incorporating API data into SPARQL query answers + 2016-10-19T21:00:00 + Matias Junemann, Juan L. 
Reutter, Adrian Soto and Domagoj Vrgoc + + 2016-10-19T18:00:00 + + 2016-10-19T18:00:00 + Incorporating API data into SPARQL query answers + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 019eb78d2972455d2a0f957cb9f883d357f9ceeb + + Besnik Fetahu + + + + + Besnik Fetahu + Besnik Fetahu + + + + + + + + + 2016-10-18T10:30:00 + 2016-10-18T11:00:00 + 2016-10-18T11:00:00 + 2016-10-18T11:00:00 + Coffee Break + 2016-10-18T11:00:00 + 2016-10-18T10:30:00 + Coffee Break + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Peter Woollard + + Peter Woollard + + Peter Woollard + + 4bc85d80d63d2d8272da0d51c92f6703d5c283a9 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Fujitsu R&D Center Co.,LTD + + + Fujitsu R&D Center Co.,LTD + Fujitsu R&D Center Co.,LTD + + + + + + + + + + + + + + + + + + + + + Robert Meusel + + + + 68ff194fb648073f974e8f6fcabf9dc0440db69b + + + + + Robert Meusel + + Robert Meusel + + + + + + + + + + + + + + + + + + + + + + + + + + question-answering + + natural language processing + + + In Sir Tim Berners-Lee’s seminal article that introduce his vision of the semantic web, one of the use-cases described was a health- related example where health consumers utilized intelligent hand-held devices that aggregated and exchanged health data from the semantic web. Presently, majority of health consumers and patients rely on personal technology and the web to find information and to make personal health decisions. This proposal aims to contribute towards that use-case, specifically in the “hot-bed” issue of human papillomavirus (HPV) vac- cine. The HPV vaccine targets young adults and teens to protect against life-threatening cancers, yet a segment of the public has reservations against the vaccine. 
I propose an interactive dialogue agent that harness patient-level vaccine information encoded in an ontology that can be “talked to” with a natural language interface using utterances. I aim to pilot this technology in a clinic to assess if patient knowledge about HPV and the vaccine is increased, and if their attitude toward the vaccine is modified as a result of using the interactive agent. + dialogue system + + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + + + question-answering + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + dialogue system + + ontology learning + + conversational agents + vaccine + + In Sir Tim Berners-Lee’s seminal article that introduce his vision of the semantic web, one of the use-cases described was a health- related example where health consumers utilized intelligent hand-held devices that aggregated and exchanged health data from the semantic web. Presently, majority of health consumers and patients rely on personal technology and the web to find information and to make personal health decisions. This proposal aims to contribute towards that use-case, specifically in the “hot-bed” issue of human papillomavirus (HPV) vac- cine. The HPV vaccine targets young adults and teens to protect against life-threatening cancers, yet a segment of the public has reservations against the vaccine. I propose an interactive dialogue agent that harness patient-level vaccine information encoded in an ontology that can be “talked to” with a natural language interface using utterances. I aim to pilot this technology in a clinic to assess if patient knowledge about HPV and the vaccine is increased, and if their attitude toward the vaccine is modified as a result of using the interactive agent. 
+ + natural language processing + ontology + conversational agents + + + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + ontology learning + + ontology + vaccine + + + + + + + + + + + Veronique Volders + + Veronique Volders + + Veronique Volders + 8d314d0023648fdac8f36a93f324aba48ac1525c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Seiya Inagi + + + + + + 26088d83ed5cf55fdfd25e4034b1713d6a88e504 + Seiya Inagi + Seiya Inagi + + + + + + George Drosatos + + + George Drosatos + + + + + + + George Drosatos + + + + + + + Anthony Cohn + + + Anthony Cohn + + fc24c428e8ed4d044040e259830f74cf8cd92971 + + + + + Anthony Cohn + + + + + + Xiang Nan Ren + + + Xiang Nan Ren + + + + + + Xiang Nan Ren + 4e67340de2275d16373425555cb401a3006329ef + + + + + + National and Kapodistrian University of Athens + + + National and Kapodistrian University of Athens + + + + National and Kapodistrian University of Athens + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T14:20:00 + 2016-10-19T14:00:00 + 2016-10-19T14:00:00 + + 2016-10-19T14:20:00 + YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames + + YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames + + 2016-10-19T14:20:00 + 2016-10-19T14:20:00 + Thomas Rebele, Fabian M. 
Suchanek, Johannes Hoffart, Joanna Biega, Erdal Kuzey and Gerhard Weikum + + + + + + + + + + + + + + + + + + + + + + + + + + + Université jean Monnet + + + + Université jean Monnet + + Université jean Monnet + + + + + + 90363e0e39c4eef6511649eb6664e2452d6838e9 + + + Zakia Kazi-Aoul + + + + + + Zakia Kazi-Aoul + + + + + Zakia Kazi-Aoul + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs + 2016-10-19T21:00:00 + SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs + + 2016-10-19T18:00:00 + Bernardo Cuenca Grau, Evgeny Kharlamov, Sarunas Marciuska, Dmitriy Zheleznyakov and Marcelo Arenas + + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + 2016-10-20T15:30:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + Ontologies (I) + 2016-10-20T15:30:00 + Ontologies (I) + 2016-10-20T16:50:00 + + + + + + + + + + + + + + + Yuzhong Qu + + + + + + + + 57682429d3d2a18d6a9c2c2b0559a2105ca034a1 + + Yuzhong Qu + + Yuzhong Qu + + + + + + + + + + + + + + + + + + + + + + + Daxin Liu + + 9d396783ee8b86ee384709f5fd61c670c62f2bc6 + + + + + Daxin Liu + + + + Daxin Liu + + + + + + + + + + + + + + + + + + + + + + + + Catherine Faron Zucker + + Catherine Faron Zucker + + + ee710ff7a176af85a72d485b18eebf2cd5d37bcb + Catherine Faron Zucker + + + Femke De Backere + Femke De Backere + + + + 429b07376b671ac75b7d20af309bb56042b5a212 + + + Femke De Backere + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Institutt for informatikk, Universitetet i Oslo + + Institutt for informatikk, Universitetet i Oslo + Institutt for informatikk, Universitetet i Oslo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Eugene Siow, Thanassis Tiropanis and Wendy Hall + 2016-10-20T14:10:00 + 2016-10-20T14:10:00 + 2016-10-20T14:10:00 + 2016-10-20T13:50:00 + SPARQL-to-SQL on Internet of Things 
Databases and Streams + + + SPARQL-to-SQL on Internet of Things Databases and Streams + 2016-10-20T13:50:00 + 2016-10-20T14:10:00 + + + + + + + + + + + + + + + Ontolonomy, LLC. + + + Ontolonomy, LLC. + + + Ontolonomy, LLC. + + + + + + + + + + + + + + + + + + + + + Federal University of Espirito Santo + + Federal University of Espírito Santo + Federal University of Espirito Santo + Federal University of Espirito Santo + + + + + + Federal University of Espírito Santo + Federal University of Espírito Santo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Masahide Naemura + + + + + + e412957400cef06b258ce055f7357fc38c9c2ceb + Masahide Naemura + Masahide Naemura + + + + + + + + + + + + + + + + Ontology learning has been an important research area in the Semantic Web field in the last 20 years. Ontology learning systems generate domain models from data (typically text) using a combination of sophisticated methods. In this poster, we study the use of Google's word2vec to emulate a simple ontology learning system, and compare the results to an existing "traditional" ontology learning system. + + + + Using word2vec to Build a Simple Ontology Learning System + + Ontology learning has been an important research area in the Semantic Web field in the last 20 years. Ontology learning systems generate domain models from data (typically text) using a combination of sophisticated methods. In this poster, we study the use of Google's word2vec to emulate a simple ontology learning system, and compare the results to an existing "traditional" ontology learning system. 
+ + + ontology learning + term extraction + ontology learning + Using word2vec to Build a Simple Ontology Learning System + + Using word2vec to Build a Simple Ontology Learning System + + term extraction + + + + word2vec + + + word2vec + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction + Dmitriy Zheleznyakov, Evgeny Kharlamov, Vidar Klungre, Martin G. Skjæveland, Dag Hovland, Martin Giese, Ian Horrocks and Arild Waaler + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 598e7b5cf0a728d043ae5f2549620571d9acf1d6 + + + Takahira Yamaguchi + + + + + + Takahira Yamaguchi + Takahira Yamaguchi + + + + + + + + + + + + + + + + 39a490ff48174beb1442f2023b5352ba604c0949 + + + + + + + Stijn De Pestel + + Stijn De Pestel + + Stijn De Pestel + + + + + + + Diego Calvanese + + + + + Diego Calvanese + + + + Diego Calvanese + 9f41f6f9767c215c484e883dfa80e694c03aef54 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2cdeff9ba575d5e67689bae4f0f86c844d69eca0 + Henning Agt-Rickauer + + + + + Henning Agt-Rickauer + + + + + Henning Agt-Rickauer + + + + + + + 2016-10-20T13:30:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 2016-10-20T13:30:00 + Linked Data Measurement + 2016-10-20T14:50:00 + Linked Data Measurement + + + + + + + + + + + + + + + + + + + + + + + + + + + Giuseppe Loseto + + + Giuseppe Loseto + + 9cceef5443af3aa049422b477bb5b9207decc74f + + Giuseppe Loseto + + + + + + + + + + + + RDF + + RDF4J + Big data + Hadoop + Hadoop + Eclipse RDF4J (formerly known as Sesame) is an open source Java framework for processing RDF data. 
RDF4J framework is extensible through its Storage And Inference Layer (SAIL) to support various RDF stores and inference engines. Apache HBase is the Hadoop database, a distributed and scalable big data store. It is designed to scale up from single servers to thousands of machines. We have connected RDF4J and HBase to receive an extremely scalable RDF store. + Sesame + HBase + SPARQL + HBase + Scalability + Triplestore + Eclipse RDF4J (formerly known as Sesame) is an open source Java framework for processing RDF data. RDF4J framework is extensible through its Storage And Inference Layer (SAIL) to support various RDF stores and inference engines. Apache HBase is the Hadoop database, a distributed and scalable big data store. It is designed to scale up from single servers to thousands of machines. We have connected RDF4J and HBase to receive an extremely scalable RDF store. + + + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + RDF + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + Sesame + + + + SPARQL + Big data + Triplestore + + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + + + + Scalability + + + RDF4J + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + Local Council Decisions as Linked Data: a proof of concept + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + Raf Buyle, Pieter Colpaert, Mathias Van Compernolle, Peter Mechant, Veronique Volders, Ruben Verborgh and Erik Mannens + Local Council Decisions as Linked Data: a proof of concept + 2016-10-19T21:00:00 + + + + 2016-10-18T09:00:00 + 2016-10-18T10:30:00 + 2016-10-18T10:30:00 + 
2016-10-18T10:30:00 + 2016-10-18T09:00:00 + Opening + 2016-10-18T10:30:00 + Opening + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mihael Arcan + be0fefb0beeda37059e2be8c52db9fe5736886b4 + + + + + + + + Mihael Arcan + + Mihael Arcan + + + + + RIKEN BioResource Center + + + + + RIKEN BioResource Center + + + RIKEN BioResource Center + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + In this paper, we propose the rst system, so-called Open Programming Knowledge Extraction (OPKE), to automatically extract knowledge from programming Question-Answering (QA) communities. OPKE is the rst step of building a programming-centric knowledge base. Data mining and Natural Language Processing techniques are leveraged to identify paraphrased questions and construct structured information. Preliminary evaluation shows the eectiveness of OPKE. + + Programming QA + Open IE + + NLP + Open IE + + NLP + + + + + Kowledge Extraction + + + + + + + Kowledge Extraction + + Towards Building Open Knowledge Base From Programming Question-Answering Communities + + In this paper, we propose the rst system, so-called Open Programming Knowledge Extraction (OPKE), to automatically extract knowledge from programming Question-Answering (QA) communities. OPKE is the rst step of building a programming-centric knowledge base. Data mining and Natural Language Processing techniques are leveraged to identify paraphrased questions and construct structured information. Preliminary evaluation shows the eectiveness of OPKE. 
+ + Towards Building Open Knowledge Base From Programming Question-Answering Communities + Programming QA + + Towards Building Open Knowledge Base From Programming Question-Answering Communities + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Jean-Paul Calbimonte + + + Jean-Paul Calbimonte + + + Jean-Paul Calbimonte + a0556fac631e917c6c1e1a697fe0e513e8e58f81 + + + + + + + + + + + + + + + 2016-10-19T15:00:00 + 2016-10-19T15:00:00 + + 2016-10-19T14:40:00 + 2016-10-19T15:00:00 + Optimizing Aggregate SPARQL Queries using Materialized RDF Views + + 2016-10-19T14:40:00 + Optimizing Aggregate SPARQL Queries using Materialized RDF Views + + Dilshod Ibragimov, Katja Hose, Torben Bach Pedersen and Esteban Zimanyi + 2016-10-19T15:00:00 + + + + + + SPARQL query processing + + + Federated querying + Big Data infrastructures + + Federated querying + + + + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + + + SPARQL query processing + + + The ability to cross-link large scale data with each other and with structured Semantic Web data, and the ability to uniformly process Semantic Web and other data adds value to both the Semantic Web and to the Big Data community. This paper presents work in progress towards integrating Big Data infrastructures with Semantic Web technologies, allowing for the cross-linking and uniform retrieval of data stored in both Big Data infrastructures and Semantic Web data. 
The technical challenges involved in achieving this, pertain to both data and system interoperability: we need a way to make the semantics of Big Data explicit so that they can interlink and we need a way to make it transparent for the client applications to query federations of such heterogeneous systems. The paper presents an extension of the Semagrow federated SPARQL query processor that is able to seamlessly federated SPARQL endpoints, Cassandra databases, and Solr databases, and discusses future directions of this line of work. + + The ability to cross-link large scale data with each other and with structured Semantic Web data, and the ability to uniformly process Semantic Web and other data adds value to both the Semantic Web and to the Big Data community. This paper presents work in progress towards integrating Big Data infrastructures with Semantic Web technologies, allowing for the cross-linking and uniform retrieval of data stored in both Big Data infrastructures and Semantic Web data. The technical challenges involved in achieving this, pertain to both data and system interoperability: we need a way to make the semantics of Big Data explicit so that they can interlink and we need a way to make it transparent for the client applications to query federations of such heterogeneous systems. The paper presents an extension of the Semagrow federated SPARQL query processor that is able to seamlessly federated SPARQL endpoints, Cassandra databases, and Solr databases, and discusses future directions of this line of work. 
+ + + + + Big Data infrastructures + + + + + + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dbe710769d92959350797a5922c0d59a94314050 + + Miriam Fernandez + + + + + + + + + + + Miriam Fernandez + + + Miriam Fernandez + + + + + + + + + + + + + + + + + + + + 3fb810d543b80bd85978ba263cb3cdb8907cb006 + Erhard Rahm + + + + Erhard Rahm + + + Erhard Rahm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + life-science database + + + + + + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + life-science database + Semantic Web + + + + Resource Description Framework + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + + To promote data dissemination and integration of life science +datasets produced in a general research institute, RIKEN, we developed +an infrastructure database named as "RIKEN MetaDatabase", which en- +ables data publication and integration with Resource Description Frame- +work. We implemented simple data managing work +ow, relational data- +base like graphical interface represents data links across laboratories. As +a result, activities of inter-laboratories collaborations and coordination +began to accelerated. Combined with global standardisation activities, +we expect this database can contribute data integration across the world. + + To promote data dissemination and integration of life science +datasets produced in a general research institute, RIKEN, we developed +an infrastructure database named as "RIKEN MetaDatabase", which en- +ables data publication and integration with Resource Description Frame- +work. 
We implemented simple data managing work +ow, relational data- +base like graphical interface represents data links across laboratories. As +a result, activities of inter-laboratories collaborations and coordination +began to accelerated. Combined with global standardisation activities, +we expect this database can contribute data integration across the world. + Resource Description Framework + + database integration + + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + + database integration + Semantic Web + + + + + + Daniel Hernandez + + + + Daniel Hernandez + + + + + + b0d6d207a9b309ae69e0a4195ae29aefcdc1eebd + + + + Daniel Hernandez + + + + + + + + + + + + + + + a933b3534829209cf3dbd337cd3c3e509d5c00c0 + + + Hiroshi Fujisawa + Hiroshi Fujisawa + + + + + + Hiroshi Fujisawa + + + + + + + + + + + + + + + + Soil Process + + An Ontology of Soil Properties and Processes + Soil Process + + + + + + An Ontology of Soil Properties and Processes + + + + + + + Assessing the Underworld (ATU) is a large interdisciplinary UK research project addressing urban infrastructure challenges, especially how to make streetworks more efficient and sustainable. One of the key challenges it addresses is integrated inter-asset maintenance. As the assets on the surface of the ground (e.g. pavements) and those buried under it (e.g. pipes and cables) are supported by the ground, the properties and processes of soil affect the performance of these assets to a significant degree. In order to make integrated decisions, it is necessary to combine the knowledge and expertise in multiple areas, such as roads, soil, buried assets, sensing, etc. This requires an underpinning knowledge model, in the form of an ontology. Within this context, we present a new ontology for describing soil properties (e.g. soil strength) and processes (e.g. soil compaction), as well as how they affect each other. 
This ontology can be used to express how the ground affects and is affected by assets buried under the ground or on the ground surface. The ontology is written in OWL 2 and openly available from the University of Leeds data repository: http://doi.org/10.5518/54. + + + An Ontology of Soil Properties and Processes + OWL Ontology + OWL Ontology + + Asset Maintenance + + + Asset Maintenance + + + + Soil Property + Soil Property + + + + Assessing the Underworld (ATU) is a large interdisciplinary UK research project addressing urban infrastructure challenges, especially how to make streetworks more efficient and sustainable. One of the key challenges it addresses is integrated inter-asset maintenance. As the assets on the surface of the ground (e.g. pavements) and those buried under it (e.g. pipes and cables) are supported by the ground, the properties and processes of soil affect the performance of these assets to a significant degree. In order to make integrated decisions, it is necessary to combine the knowledge and expertise in multiple areas, such as roads, soil, buried assets, sensing, etc. This requires an underpinning knowledge model, in the form of an ontology. Within this context, we present a new ontology for describing soil properties (e.g. soil strength) and processes (e.g. soil compaction), as well as how they affect each other. This ontology can be used to express how the ground affects and is affected by assets buried under the ground or on the ground surface. The ontology is written in OWL 2 and openly available from the University of Leeds data repository: http://doi.org/10.5518/54. + + + + + + + + + + + + + + + + Path queries + SPARQL + + + Context-free path queries + + + + Navigational queries + + + Context-free path queries + RDF + + Regular path queries + Navigational graph queries are an important class of queries that can extract implicit binary relations over the nodes of input graphs. 
Most of the navigational query languages used in the RDF community, e.g. property paths in W3C SPARQL 1.1 and nested regular expressions in nSPARQL, are based on the regular expressions. It is known that regular expressions have limited expressivity; for instance, some natural queries, like same generations-queries} are not expressible with regular expressions. To overcome this limitation, in this paper, we present cfSPARQL, an extension of SPARQL query language equipped with context-free grammars. The cfSPARQL language is strictly more expressive than property paths and nested expressions. The additional expressivity can be used for modelling graph similarities, graph summarization and ontology alignment. Despite the increasing expressivity, we show that cfSPARQL still enjoys a low computational complexity and can be evaluated efficiently. + Context-Free Path Queries on RDF Graphs + + Context-Free Path Queries on RDF Graphs + + RDF + + + + Context-Free Path Queries on RDF Graphs + + + + Regular path queries + + Navigational graph queries are an important class of queries that can extract implicit binary relations over the nodes of input graphs. Most of the navigational query languages used in the RDF community, e.g. property paths in W3C SPARQL 1.1 and nested regular expressions in nSPARQL, are based on the regular expressions. It is known that regular expressions have limited expressivity; for instance, some natural queries, like same generations-queries} are not expressible with regular expressions. To overcome this limitation, in this paper, we present cfSPARQL, an extension of SPARQL query language equipped with context-free grammars. The cfSPARQL language is strictly more expressive than property paths and nested expressions. The additional expressivity can be used for modelling graph similarities, graph summarization and ontology alignment. 
Despite the increasing expressivity, we show that cfSPARQL still enjoys a low computational complexity and can be evaluated efficiently. + + + + Path queries + SPARQL + + Navigational queries + + + + + + + + + + + Tony Lee + + + + + + + + + + Tony Lee + + Tony Lee + 7feda9d7ed587af441ac90ae67d2c5b09418dddd + + + + + + + + This paper proposes Agriculture Activity Ontology(AAO) as a basis of the core vocabulary of agricultural activity. Since concepts of agriculture activities are formed by the various context such as purpose, means, crop, and field, we organize the agriculture activity ontology as a hierarchy of concepts discriminated by various properties such as purpose, means, crop and field. The vocabulary of agricultural activity is then defined as the subset of the ontology. Since the ontology is consistent, extendable, and capable of some inferences thanks to Description Logics, so the vocabulary inherits these features. The vocabulary is also linked to existing vocabularies such as AGROVOC. It is expected to use in the data format in the agricultural IT system. The vocabulary is adopted as the part of "the guideline for agriculture activity names for agriculture IT systems" issued by Ministry of Agriculture, Forestry and Fisheries (MAFF), Japan. + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + + + This paper proposes Agriculture Activity Ontology(AAO) as a basis of the core vocabulary of agricultural activity. Since concepts of agriculture activities are formed by the various context such as purpose, means, crop, and field, we organize the agriculture activity ontology as a hierarchy of concepts discriminated by various properties such as purpose, means, crop and field. The vocabulary of agricultural activity is then defined as the subset of the ontology. Since the ontology is consistent, extendable, and capable of some inferences thanks to Description Logics, so the vocabulary inherits these features. 
The vocabulary is also linked to existing vocabularies such as AGROVOC. It is expected to use in the data format in the agricultural IT system. The vocabulary is adopted as the part of "the guideline for agriculture activity names for agriculture IT systems" issued by Ministry of Agriculture, Forestry and Fisheries (MAFF), Japan. + agriculture + ontology + + + agriculture + + agronomic sciences + + + agronomic sciences + + + + ontology + + + + knowledge representation + + + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + + core vocabulary + core vocabulary + + vocabulary management + + vocabulary management + knowledge representation + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + + + + + + + + + + + + + + + + + + + + cf5050096ed30e26541ecb2ea068ab01e915aca8 + Roberto Garcia + + + + + Roberto Garcia + + Roberto Garcia + + + + + + + + 2016-10-20T17:00:00 + 2016-10-20T18:00:00 + 2016-10-20T18:00:00 + 2016-10-20T18:00:00 + Town Hall + 2016-10-20T18:00:00 + Town Hall + 2016-10-20T17:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ian Horrocks + + + + 3361a8a2f71036d7ca03076a41f4d8ae08c71e97 + + + + + + Ian Horrocks + + + Ian Horrocks + + + + + + + + + + + + + + + + + + + + + + + + + + Frank Den Hartog + + + + + ec0d87cac503768feb5676d8910729d845b9881f + Frank Den Hartog + + + + + + Frank Den Hartog + + + + + + + + + + + + + + + + + + + + + + + + + + + + Natural Language Generation + Aggregation + Natural Language Generation + Enriching Answers in Question Answering Systems using Linked Data + Enriched Answer + Linked Data + + Lexicalization + + + + + + Question Answering + + + Enriching Answers in Question Answering Systems using Linked Data + Question Answering + Aggregation + + + Linked Data has emerged as the most widely used and the most powerful knowledge source for Question Answering (QA). 
Although Question Answering using Linked Data (QALD) fills in many gaps in the traditional QA models, the answers are still presented as factoids. This research introduces an answer presentation model for QALD by employing Natural Language Generation (NLG) to generate natural language descriptions to present an informative answer. The proposed approach employs lexicalization, aggregation, and referring expression generation to build a human-like enriched answer utilizing the triples extracted from the entities mentioned in the question as well as the entities contained in the answer. + + Referring Expression Generation + Referring Expression Generation + Lexicalization + + Enriched Answer + Linked Data has emerged as the most widely used and the most powerful knowledge source for Question Answering (QA). Although Question Answering using Linked Data (QALD) fills in many gaps in the traditional QA models, the answers are still presented as factoids. This research introduces an answer presentation model for QALD by employing Natural Language Generation (NLG) to generate natural language descriptions to present an informative answer. The proposed approach employs lexicalization, aggregation, and referring expression generation to build a human-like enriched answer utilizing the triples extracted from the entities mentioned in the question as well as the entities contained in the answer. 
+ + Enriching Answers in Question Answering Systems using Linked Data + Linked Data + + + + + 2016-10-19T14:40:00 + 2016-10-19T15:00:00 + + 2016-10-19T15:00:00 + Zhishi.lemon:On Publishing Zhishi.me as Linguistic Linked Open Data + Haofen Wang, Zhijia Fang, Jorge Gracia, Julia Bosque-Gil and Tong Ruan + Zhishi.lemon:On Publishing Zhishi.me as Linguistic Linked Open Data + + + 2016-10-19T14:40:00 + 2016-10-19T15:00:00 + 2016-10-19T15:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Leipzig + + + University of Leipzig + + + University of Leipzig + + + + + + + + Distributed Queries + Multi-Agent Systems + + + + This study presents a framework to allow human and machine agents to reason and coordinate actions without direct communication mechanisms by sharing distributed Linked Data resources. This framework addresses the problems of querying frequently-updating distributed datasets and guaranteeing transactional consistency. The motivation for this framework comes from the use-case of opportunistic automation of humans-generated procedures. This use-case is based on existing real-world Linked Data representations of human instructions and their integration with machine functionalities. + + + + Multi-Agent Systems + Human-Machine Collaboration over Linked Data + Human-Machine Collaboration over Linked Data + + Distributed Queries + + Human Computation + + Linked Data + + Linked Data + Human-Machine Collaboration + + This study presents a framework to allow human and machine agents to reason and coordinate actions without direct communication mechanisms by sharing distributed Linked Data resources. This framework addresses the problems of querying frequently-updating distributed datasets and guaranteeing transactional consistency. The motivation for this framework comes from the use-case of opportunistic automation of humans-generated procedures. 
This use-case is based on existing real-world Linked Data representations of human instructions and their integration with machine functionalities. + Human Computation + Human-Machine Collaboration over Linked Data + Human-Machine Collaboration + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 9e46519c300f7b6321d6d2ab61bdfdb5624fa296 + + + + + + + + Ilaria Tiddi + + + Ilaria Tiddi + + Ilaria Tiddi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T13:30:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T13:30:00 + Medical Applications + Medical Applications + 2016-10-21T14:50:00 + + + + + + + Mauro Dragoni + + + Mauro Dragoni + + + cb0fc458c5bd360c5fa92b296c68974ccd1112c5 + + + + Mauro Dragoni + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + DBpedia + + + Lexicalization + + + Linked Data + DBpedia encodes massive amounts of open domain knowledge and is growing by accumulating more triples at the same rate as Wikipedia. However, in order to be able to present the knowledge processed using DBpedia, the applications need to present this knowledge often require natural language formulations of these triples. The RealText-lex2 framework offers a scalable platform to transform these triples to natural language sentences using lexicalization patterns. The framework has evolved from its previous version (RealText-lex) and is comprised of four lexicalization pattern mining modules which derive patterns from a training triple collection. These patterns can be then applied on the new triples given that they satisfy a defined set of constraints. 
+ Natural Language Generation + + Linked Data + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + Natural Language Generation + + + + Lexicalization + + DBpedia encodes massive amounts of open domain knowledge and is growing by accumulating more triples at the same rate as Wikipedia. However, in order to be able to present the knowledge processed using DBpedia, the applications need to present this knowledge often require natural language formulations of these triples. The RealText-lex2 framework offers a scalable platform to transform these triples to natural language sentences using lexicalization patterns. The framework has evolved from its previous version (RealText-lex) and is comprised of four lexicalization pattern mining modules which derive patterns from a training triple collection. These patterns can be then applied on the new triples given that they satisfy a defined set of constraints. + + + + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + DBpedia + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Scalable Link Discovery for Modern Data-Driven Applications + 2016-10-18T15:00:00 + 2016-10-18T15:15:00 + + Scalable Link Discovery for Modern Data-Driven Applications + 2016-10-18T15:15:00 + 2016-10-18T15:15:00 + 2016-10-18T15:15:00 + 2016-10-18T15:00:00 + Kleanthi Georgala + + + + + + + + + + + + + + + + + + Philipp Cimiano + + + + + + Philipp Cimiano + Philipp Cimiano + + + 4d4ded20b46ca7280eb79b3bffdec9c8722e8335 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Leeds + + University of Leeds + 
University of Leeds + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Amelie Gyrard + b28fc7fb3ee28ba063dec452b9d2a876878d6728 + + Amelie Gyrard + + + + + + + + Amelie Gyrard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 3cf94eccca55ab0ebd77a50c180bdc41fbfb30cc + Giuseppe De Giacomo + + + + + Giuseppe De Giacomo + Giuseppe De Giacomo + + + + + + + + + + + + + + German University in Cairo + + + + + + + German University in Cairo + + + + German University in Cairo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Efstratios Sygkounas + + + + + Efstratios Sygkounas + 82bc09d096246a18d6b76011c4fb2388edc45289 + + Efstratios Sygkounas + + + + + + 05aad93467b2184dff5726c93aea878d6c10736a + Gerhard Weikum + Gerhard Weikum + + + + + + + + + Gerhard Weikum + + + + + + Joscha Jäger + + + + + + Joscha Jäger + + b238a1a77f31130d9dff2a6e9931eb50c92ccb05 + Joscha Jäger + + + + + + + + + + + + + + + + + + + + Julia Bosque-Gil + + + + + + + Julia Bosque-Gil + c77a31610a92e47a391e24fcf6cbd73d0c3e1faa + + + + Julia Bosque-Gil + + + + 2016-10-19T15:20:00 + + 2016-10-19T15:00:00 + Xiaowang Zhang, Zhiyong Feng, Xin Wang, Guozheng Rao and Wenrui Wu + + Context-Free Path Queries on RDF Graphs + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:00:00 + Context-Free Path Queries on RDF Graphs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + semantic audio analysis + Semantic Web technologies + linked open data + + linked open data + + music information retrieval + + semantic audio analysis + Feature extraction algorithms in Music Informatics aim at deriving statistical and semantic information directly from audio signals. 
These may be ranging from energies in several frequency bands to musical information such as key, chords or rhythm. There is an increasing diversity and complexity of features and algorithms in this domain and applications call for a common structured representation to facilitate interoperability, reproducibility and machine interpretability. We propose a solution relying on Semantic Web technologies that is designed to serve a dual purpose (1) to represent computational workflows of audio features and (2) to provide a common structure for feature data to enable the use of Open Linked Data principles and technologies in Music Informatics. The Audio Feature Ontology is based on the analysis of existing tools and music informatics literature, which was instrumental in guiding the ontology engineering process. The ontology provides a descriptive framework for expressing different conceptualisations of the audio feature extraction domain and enables designing linked data formats for representing feature data. In this paper, we discuss important modelling decisions and introduce a harmonised ontology library consisting of modular interlinked ontologies that describe the different entities and activities involved in music creation, production and publishing. + + Feature extraction algorithms in Music Informatics aim at deriving statistical and semantic information directly from audio signals. These may be ranging from energies in several frequency bands to musical information such as key, chords or rhythm. There is an increasing diversity and complexity of features and algorithms in this domain and applications call for a common structured representation to facilitate interoperability, reproducibility and machine interpretability. 
We propose a solution relying on Semantic Web technologies that is designed to serve a dual purpose (1) to represent computational workflows of audio features and (2) to provide a common structure for feature data to enable the use of Open Linked Data principles and technologies in Music Informatics. The Audio Feature Ontology is based on the analysis of existing tools and music informatics literature, which was instrumental in guiding the ontology engineering process. The ontology provides a descriptive framework for expressing different conceptualisations of the audio feature extraction domain and enables designing linked data formats for representing feature data. In this paper, we discuss important modelling decisions and introduce a harmonised ontology library consisting of modular interlinked ontologies that describe the different entities and activities involved in music creation, production and publishing. + + + Ontological representation of audio features + + Ontological representation of audio features + Semantic Web technologies + + + Ontological representation of audio features + + + + + music information retrieval + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 7fc9c74bf9b38cba0a03f49609524524e9a449e1 + + + Pierpaolo Tommasi + + + + + + + + + Pierpaolo Tommasi + + Pierpaolo Tommasi + + + + + + + + + + + + + + + + Jongmin Lee + + + + + + + + + + Jongmin Lee + c4c03cb475e7a67eada70ea6dea388ed7d5b64c6 + + Jongmin Lee + + + + + + + + + + + + + + + + + Linked Linguistic Data + + + + + biological pathways + Sembank + + Abstract Meaning Representation + + + biological pathways + + Sembank + Linked Linguistic Data + Abstract Meaning Representation + + + Abstract Meaning Representations as Linked Data + + AMR + Significant advances in Natural Language Processing (NLP) research are fostered when high-quality annotated corpora were provided for general use. 
In an effort to develop a sembank (i.e., an annotated corpus dedicated to capturing the semantic meaning of a large set of annotated sentences), NLP researchers have developed the Abstract Meaning Representation (AMR) formulation. Each AMR is a rooted, labeled graph that represents the semantics of a single sentence. Nodes in the core AMR graph represent concepts/entities (such as nouns, PropBank frames, etc.) and edges represent relations between concepts, (such a frame-specific arguments, roles, etc.). AMRs have been used to annotate corpora of classic books, newstext and the biomedical research literature. Research is progressing on creating automatic parsers to generate AMRs directly from textual input. In the work described here, we map the AMR representation to a linked data format (AMR-LD), adopting the ontological formulation of the underlying AMR faithfully. We describe the process of generating AMR-LD data from standard AMRs derived from biomedical research articles, including mapping named entities to well-known linked-data resources, such as Uniprot and PubChem, as well as an open-source software to convert AMR data to RDF. We describe the benefits of AMR-LD, including convenient analysis using SPARQL queries and ontology inferences, and embedding into the web of Linked Data. Finally, we discuss the possible impact of semantic web representations that are directly derived from natural language. + + Abstract Meaning Representations as Linked Data + Significant advances in Natural Language Processing (NLP) research are fostered when high-quality annotated corpora were provided for general use. In an effort to develop a sembank (i.e., an annotated corpus dedicated to capturing the semantic meaning of a large set of annotated sentences), NLP researchers have developed the Abstract Meaning Representation (AMR) formulation. Each AMR is a rooted, labeled graph that represents the semantics of a single sentence. 
Nodes in the core AMR graph represent concepts/entities (such as nouns, PropBank frames, etc.) and edges represent relations between concepts, (such a frame-specific arguments, roles, etc.). AMRs have been used to annotate corpora of classic books, newstext and the biomedical research literature. Research is progressing on creating automatic parsers to generate AMRs directly from textual input. In the work described here, we map the AMR representation to a linked data format (AMR-LD), adopting the ontological formulation of the underlying AMR faithfully. We describe the process of generating AMR-LD data from standard AMRs derived from biomedical research articles, including mapping named entities to well-known linked-data resources, such as Uniprot and PubChem, as well as an open-source software to convert AMR data to RDF. We describe the benefits of AMR-LD, including convenient analysis using SPARQL queries and ontology inferences, and embedding into the web of Linked Data. Finally, we discuss the possible impact of semantic web representations that are directly derived from natural language. + + + + + + AMR + Abstract Meaning Representations as Linked Data + + + + + + + + + + + + + + + + + + + + + + + + 4a5a465327d5cdbc52f1c12e13be99a556cde78d + + + + Sören Auer + Sören Auer + + Sören Auer + + + + + + + + + + + + + + + + + + + + + + + + British Geological Survey + British Geological Survey + + + British Geological Survey + + + + + + + + + Guohui Xiao + + + + + cf84eed127219398853a099856dcc752b11584d1 + + + + + + Guohui Xiao + Guohui Xiao + + + + + + + + + + + + + + + + + + + + + + ADAS Ontology + Map Converter + + + + Map Converter + + Sophisticated digital map is an essential resource for intelligent vehicles to localize and retrieve environment information. However, the open map source do not contain enough information for decision making during autonomous driving. 
Although comprehensive commercial map data can provide precise map knowledge, the data format is not in a machine-readable format. Therefore, we retrieve useful knowledge from high-precision commercial map and convert it into ontology based data to help intelligent vehicles perceive driving environment and make decisions at various traffic scenarios. Other than developing deci- sion making systems, the converted map data can be used as a golden standard for evaluating traffic sign detection, road mark detection, and automatic map construction. + + An Ontology based Map Converter for Intelligent Vehicles + + + Sophisticated digital map is an essential resource for intelligent vehicles to localize and retrieve environment information. However, the open map source do not contain enough information for decision making during autonomous driving. Although comprehensive commercial map data can provide precise map knowledge, the data format is not in a machine-readable format. Therefore, we retrieve useful knowledge from high-precision commercial map and convert it into ontology based data to help intelligent vehicles perceive driving environment and make decisions at various traffic scenarios. Other than developing deci- sion making systems, the converted map data can be used as a golden standard for evaluating traffic sign detection, road mark detection, and automatic map construction. 
+ ADAS Ontology + An Ontology based Map Converter for Intelligent Vehicles + An Ontology based Map Converter for Intelligent Vehicles + Intelligent Vehicles + + Intelligent Vehicles + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mohsen Taheriyan, Craig Knoblock, Pedro Szekely and José Luis Ambite + 2016-10-21T15:50:00 + + 2016-10-21T16:10:00 + Leveraging Linked Data to Discover Semantic Relations within Data Sources + 2016-10-21T16:10:00 + Leveraging Linked Data to Discover Semantic Relations within Data Sources + 2016-10-21T16:10:00 + 2016-10-21T16:10:00 + + + 2016-10-21T15:50:00 + + + + + + + + + + + + + Wei Emma Zhang + Wei Emma Zhang + + + + + + Wei Emma Zhang + + 72854449895b07f7cb8bd87798b33aacad619024 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + SOLS: A Semantically Enriched Learning System Using LOD Based Automatic Question Generation + + + + SOLS: A Semantically Enriched Learning System Using LOD Based Automatic Question Generation + + History Learning + + History Learning + + + Linked Open Data + Semantic Open Learning Space + + + + Question Generation + + Semantic Open Learning Space + SOLS: A Semantically Enriched Learning System Using LOD Based Automatic Question Generation + + Linked Open Data + + Question Generation + The purpose of this research is to use Linked Open Data (LOD) to support history learning on the Internet. The main issue to create meaningful content-dependent advice for learners is that the system requires an understanding of the learning domain. The learners use the Semantic Open Learning Space (SOLS) to create a machine-understandable concept map that represent their knowledge. SOLS is able to dynamically generate questions depending on each learner’s concept map. The system uses history domain ontologies to generate questions that aim to help learners develop their deep historical considerations. 
An evaluation showed that the learners using the question generation function could express deeper historical considerations after learning. + The purpose of this research is to use Linked Open Data (LOD) to support history learning on the Internet. The main issue to create meaningful content-dependent advice for learners is that the system requires an understanding of the learning domain. The learners use the Semantic Open Learning Space (SOLS) to create a machine-understandable concept map that represent their knowledge. SOLS is able to dynamically generate questions depending on each learner’s concept map. The system uses history domain ontologies to generate questions that aim to help learners develop their deep historical considerations. An evaluation showed that the learners using the question generation function could express deeper historical considerations after learning. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Martin Koch + + Martin Koch + 931600abfa52a6886c6bd1f7c0017d5ce669fc41 + Martin Koch + + + + + + + + + + + + + + + + Distributed Computing + + + + + + + RDF + Semantic Web + + Due to the growing need to timely process and derive valuable information and knowledge from data produced in the Semantic Web, RDF stream processing (RSP) has emerged as an important research domain. In this paper, we describe the design of an RSP engine that is built upon state of the art Big data frameworks, namely Apache Kafka and Apache Spark. Together, they support the implementation of a production-ready RSP engine that guarantees scalability, fault-tolerance, high availability, low latency and high throughput. Moreover, we highlight that the Spark framework considerably eases the implementation of complex applications requiring libraries as diverse as machine learning, graph processing, query processing and stream processing. 
+ + + RDF + Distributed Computing + Due to the growing need to timely process and derive valuable information and knowledge from data produced in the Semantic Web, RDF stream processing (RSP) has emerged as an important research domain. In this paper, we describe the design of an RSP engine that is built upon state of the art Big data frameworks, namely Apache Kafka and Apache Spark. Together, they support the implementation of a production-ready RSP engine that guarantees scalability, fault-tolerance, high availability, low latency and high throughput. Moreover, we highlight that the Spark framework considerably eases the implementation of complex applications requiring libraries as diverse as machine learning, graph processing, query processing and stream processing. + + + Stream Processing + + + Apache Spark and Apache Kafka at the rescue of distributed RDF Stream Processing engines + + + Apache Spark and Apache Kafka at the rescue of distributed RDF Stream Processing engines + RSP + + + + + Apache Spark and Apache Kafka at the rescue of distributed RDF Stream Processing engines + Stream Processing + + Semantic Web + RSP + + + + + + + + + + + + Xiongnan Jin + + Xiongnan Jin + + + + Xiongnan Jin + + + + + + + + 3e563a2248b8a0a00f09e5e6563f6efbc97d5be9 + + + RDF aggregate view + + + + + + + + + + During the past couple of years, more and more data has been published as native RDF datasets. In this setup, both the size of the datasets and the need to process aggregate queries represent challenges for standard SPARQL query processing techniques. To overcome these limitations, materialized views can be created and used as a source of precomputed partial results during query processing. However, materialized view techniques, as proposed in relational databases, do not support RDF specifics, such as incompleteness and the need to support implicit (derived) information. 
Therefore, to overcome these challenges, this paper proposes MARVEL – the approach consisting of a view selection algorithm based on an RDF-specific cost model, a view definition syntax. and an algorithm for rewriting SPARQL queries using materialized RDF views. The experimental evaluation shows that the approach can improve query response time by more than an order of magnitude and is able to efficiently handle RDF specifics. + analytical query processing + SPARQL 1.1 + + analytical query processing + + SPARQL 1.1 + + + + Optimizing Aggregate SPARQL Queries using Materialized RDF Views + + + Optimizing Aggregate SPARQL Queries using Materialized RDF Views + + During the past couple of years, more and more data has been published as native RDF datasets. In this setup, both the size of the datasets and the need to process aggregate queries represent challenges for standard SPARQL query processing techniques. To overcome these limitations, materialized views can be created and used as a source of precomputed partial results during query processing. However, materialized view techniques, as proposed in relational databases, do not support RDF specifics, such as incompleteness and the need to support implicit (derived) information. Therefore, to overcome these challenges, this paper proposes MARVEL – the approach consisting of a view selection algorithm based on an RDF-specific cost model, a view definition syntax. and an algorithm for rewriting SPARQL queries using materialized RDF views. The experimental evaluation shows that the approach can improve query response time by more than an order of magnitude and is able to efficiently handle RDF specifics. + Optimizing Aggregate SPARQL Queries using Materialized RDF Views + + + RDF aggregate view + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Universität Bremen + + + Universität Bremen + + + + + Universität Bremen + + + + + + + + + + + + + + + + + + + + + + + + M. 
Tamer Ozsu + + + M. Tamer Ozsu + b80d152f81024189c05d26b1934503a29488f423 + M. Tamer Ozsu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Heiko Paulheim + + + + + + + + Heiko Paulheim + + + Heiko Paulheim + + + + 8fe63debbbaf9daa6d4def3ebfdf5e0d6ac2d368 + + + + We describe a DBpedia extractor materializing as linked data the editing history of Wikipedia pages to support historical queries and indicators. + DBpedia + + Materializing the editing history of Wikipedia as linked Data in DBpedia + Wikipedia + + + Editing history + + We describe a DBpedia extractor materializing as linked data the editing history of Wikipedia pages to support historical queries and indicators. + Wikipedia + Materializing the editing history of Wikipedia as linked Data in DBpedia + + Materializing the editing history of Wikipedia as linked Data in DBpedia + + + + + + Editing history + + + DBpedia + + + + + + + + + + + + + + + + + + + + + + + + Daniel Faria + + c9c7a7da22700c89a63f7cc4c03d49e229730308 + + + + Daniel Faria + + + + + + + Daniel Faria + + + + + + + + + + + + + Dmitriy Zheleznyakov + + + + + + + + Dmitriy Zheleznyakov + + ae369e4d41a81ab112ac21fe44b96510e4ff7f7c + + + Dmitriy Zheleznyakov + + + + 2016-10-21T13:50:00 + Anthony Potter, Boris Motik, Yavor Nenov and Ian Horrocks + 2016-10-21T13:50:00 + 2016-10-21T13:50:00 + + 2016-10-21T13:30:00 + Distributed RDF Query Answering with Dynamic Data Exchange + + 2016-10-21T13:50:00 + Distributed RDF Query Answering with Dynamic Data Exchange + 2016-10-21T13:30:00 + + + + + + + + + + + + + + + + + + + + + + computational creativity + description logics + Can you imagine... a language for combinatorial creativity? + Can you imagine... a language for combinatorial creativity? 
+ Combinatorial creativity combines existing concepts in a novel way in order to produce a new concept. For example, we can imag- ine jewelry that measures blood pressure. For this, we would combine the concept of jewelry with the capabilities of medical devices. Combinato- rial creativity can be used to develop new business ideas, to find plots for books or movies, or simply to disrupt conventional thinking. In this paper, we propose a formal language for combinatorial creativity, based on description logics. We show that our language can be used to model existing inventions and (to a limited degree) to generate new concepts. + + + + + Can you imagine... a language for combinatorial creativity? + + Combinatorial creativity combines existing concepts in a novel way in order to produce a new concept. For example, we can imag- ine jewelry that measures blood pressure. For this, we would combine the concept of jewelry with the capabilities of medical devices. Combinato- rial creativity can be used to develop new business ideas, to find plots for books or movies, or simply to disrupt conventional thinking. In this paper, we propose a formal language for combinatorial creativity, based on description logics. We show that our language can be used to model existing inventions and (to a limited degree) to generate new concepts. 
+ + + + + + + + description logics + + ontologies + + + + + computational creativity + + + ontologies + + + + + + + + + + + + + + University of Electro-Communications + + University of Electro-Communications + + + + + University of Electro-Communications + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Dilshod Ibragimov + + + + + + + + + Dilshod Ibragimov + + 659d8ce7d16c4bb19fb0caab06c70b582964fadd + + Dilshod Ibragimov + + + + + + + + + + + + + + + + + + + + Materialization + SERVICE + Proactive Replication of Dynamic Linked Data for Scalable RDF Stream Processing + + + SPARQL Endpoint + RDF stream processing + In this paper, we propose a proactive replication of Linked Data for RDF Stream Processing. Our solution achieves a fast query processing by replicating subsets of remote RDF datasets before query evaluation. To construct the replication process effectively, we present an update estimation model to handle the changes in updates over time. With the update estimation model, we re-compose instances of the replication process in response to some problems, i.e., the outdated data. +Finally, we conduct exhaustive tests with a real-world dataset to verify +our solution. + + + Materialization + + Replication + + + + + Proactive Replication of Dynamic Linked Data for Scalable RDF Stream Processing + + + Proactive Replication of Dynamic Linked Data for Scalable RDF Stream Processing + + SERVICE + + + + + RDF stream processing + SPARQL Endpoint + + + + + In this paper, we propose a proactive replication of Linked Data for RDF Stream Processing. Our solution achieves a fast query processing by replicating subsets of remote RDF datasets before query evaluation. To construct the replication process effectively, we present an update estimation model to handle the changes in updates over time. With the update estimation model, we re-compose instances of the replication process in response to some problems, i.e., the outdated data. 
+Finally, we conduct exhaustive tests with a real-world dataset to verify +our solution. + Replication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + National University of Ireland, Galway + + National University of Ireland, Galway + + + + + National University of Ireland, Galway + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Johannes Hoffart + Johannes Hoffart + + + Johannes Hoffart + + + + + + + bb43cd4c4209a7e7c368ba917ec884f8947ae595 + + + + + + + + + + + + University Politehnica of Bucharest + University Politehnica of Bucharest + + University Politehnica of Bucharest + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Hideaki Takeda + Hideaki Takeda + + + + b89c2a9de1b2b9c0b16d4d1e0bc33ca371f25e46 + Hideaki Takeda + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Valentina Ivanova + + + Valentina Ivanova + + + + + 7a322ceb995275257234fa3abcb633fb972069b0 + + + + Valentina Ivanova + + + + + + + + + + + + + + + + + + + + + + + + + + + + Université Paris-Est LIGM + Université Paris-Est LIGM + + Université Paris-Est LIGM + + + + + + 2016-10-19T14:00:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T14:00:00 + Querying + Querying + + + + + + + + + + + + + + + + + + Ontologies Guidelines for Best Practice and a Process to Evaluate Existing Ontologies Mapping Tools and Algorithms + 2016-10-19T21:00:00 + Ian Harrow, Martin Romacker, Andrea Splendiani, Stefan Negru, Peter Woollard, Scott Markel, Yasmin Alam-Faruque, Martin Koch, Erfan Younesi, James Malone and Ernesto Jimenez-Ruiz + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + + Ontologies Guidelines for Best Practice and a Process to Evaluate Existing 
Ontologies Mapping Tools and Algorithms + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Lingkun Xue + + + + + + + Lingkun Xue + Lingkun Xue + + + + ed856da7f2c572db81f9a3f26ef0e15ca51b326e + + + + + + + Martin Serrano + 6c2abeed98b1778ba872f0ba32617cf7e6c58108 + + Martin Serrano + + + + + + Martin Serrano + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + James Malone + + + d515952bc1adc58048cad9b6b7c673ac7cb26cd1 + James Malone + + James Malone + + + + + + + + + + Université Côte d’Azur, CNRS, INRIA, I3S + + Université Côte d’Azur, CNRS, INRIA, I3S + + Université Côte d’Azur, CNRS, INRIA, I3S + + + + + + Quantified Self + Data integration + + Quantified Self + + + + + + The assessment of risk in medicine is a crucial task, depending on scientific knowledge derived by rigorous clinical studies regarding the (quantified) factors affecting biological changes, as well as on particular knowledge about the current status of a particular patient. Existing non-semantic risk prediction tools are typically based on hardcoded scientific knowledge, and only cover a very limited range of patient states. This makes them rapidly out of date, and limited in application, particularly for patients with co-morbidities (multiple co-occurring conditions). Semantic Web and Quantified Self technologies make it possible to address this task in a much more principled way, to maximise knowledge and data reuse and minimise maintenance requirements while enabling new and sophisticated applications involving widely-available biometric sensors. + +We present a framework for calculating clinical risk predictions for patients based on automatically-gathered biometric data. 
This framework relies on generic, reusable ontologies for representing clinical risk, and sensor readings, and reasoning to support the integration of data represented according to these ontologies. This integration makes novel use of Semantic Web technologies, and supports straightforward extension and maintenance by medical professionals. The framework is evaluated in terms of its predictions, extensibility and ease of use for domain experts. + + + + + eHealth + + + eHealth + + + + + + + + + Integrating medical scientific knowledge with the semantically Quantified Self + + Integrating medical scientific knowledge with the semantically Quantified Self + + Data integration + + + Integrating medical scientific knowledge with the semantically Quantified Self + + + The assessment of risk in medicine is a crucial task, depending on scientific knowledge derived by rigorous clinical studies regarding the (quantified) factors affecting biological changes, as well as on particular knowledge about the current status of a particular patient. Existing non-semantic risk prediction tools are typically based on hardcoded scientific knowledge, and only cover a very limited range of patient states. This makes them rapidly out of date, and limited in application, particularly for patients with co-morbidities (multiple co-occurring conditions). Semantic Web and Quantified Self technologies make it possible to address this task in a much more principled way, to maximise knowledge and data reuse and minimise maintenance requirements while enabling new and sophisticated applications involving widely-available biometric sensors. + +We present a framework for calculating clinical risk predictions for patients based on automatically-gathered biometric data. This framework relies on generic, reusable ontologies for representing clinical risk, and sensor readings, and reasoning to support the integration of data represented according to these ontologies. 
This integration makes novel use of Semantic Web technologies, and supports straightforward extension and maintenance by medical professionals. The framework is evaluated in terms of its predictions, extensibility and ease of use for domain experts. + + + + + + + + + + + + + + + + + + + + 22f4821ed44c568658a0b3b43d0429314a7b10f2 + + + Mariano Rodríguez Muro + Mariano Rodríguez Muro + Mariano Rodriguez-Muro + + + + + Mariano Rodríguez Muro + Mariano Rodriguez-Muro + + + + Mariano Rodriguez-Muro + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ioana Manolescu + + + e9f137020be7d1c2124d61b9ea8d1e143b10e8c8 + + Ioana Manolescu + Ioana Manolescu + + + + + + + + + Yannis Kotidis + + + + + + Yannis Kotidis + + + + 8bb24690f6265676208438b68e9bf820dbbfd89b + Yannis Kotidis + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Norman Paton + + + Norman Paton + + + a5e01a63368506f602085790ffcc24042173c804 + + + Norman Paton + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Filip Minic + + + + + + + Filip Minic + Filip Minic + 4f8934e8318c2206a2ea97a5f943f7965842c155 + + + + + + + + + + + + + + + + + + + + + + + + + + Eva Fernandez + + + + + Eva Fernandez + + Eva Fernandez + + + + + 2286d3dadbeee380f6ad45b55e9f889fe859ef65 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 234fa98435383e615aae1ea20923df36499110e4 + + + + + Michele Ruta + + + + + + + + Michele Ruta + Michele Ruta + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mohsen Taheriyan + + + + + Mohsen Taheriyan + + + + c4790d48328a9f0b42b299a6b6135c0cb9ce7c4e + Mohsen Taheriyan + + + + + + + Kyushu Institute of Technology + + + + + Kyushu Institute of Technology + + + + Kyushu Institute of Technology + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + RDF + + + + + Parallel computing 
+ + Many existing approaches have been proposed to solve subgraph matching problem based on filter-and-refine strategy. The efficiency of those existing serial approaches relies on the computational capabilities of CPU. In this paper, we propose an RDF subgraph matching algorithm based on type-isomorphism using GPU since GPU has higher computational performance, more scalability, and lower price than CPU. Firstly, we present a concurrent matching model for type-isomorphism so that subgraph matching can be tackled in a parallel way. Secondly, we develop a parallel algorithm for capturing our proposed concurrent matching model and implement a prototype called IRSMG using GPU. Finally, we evaluate IRSMG on the benchmark datasets LUBM. The experiments show that IRSMG significantly outperforms the state-of-the-art algorithms on the CPU. + + Parallel computing + Many existing approaches have been proposed to solve subgraph matching problem based on filter-and-refine strategy. The efficiency of those existing serial approaches relies on the computational capabilities of CPU. In this paper, we propose an RDF subgraph matching algorithm based on type-isomorphism using GPU since GPU has higher computational performance, more scalability, and lower price than CPU. Firstly, we present a concurrent matching model for type-isomorphism so that subgraph matching can be tackled in a parallel way. Secondly, we develop a parallel algorithm for capturing our proposed concurrent matching model and implement a prototype called IRSMG using GPU. Finally, we evaluate IRSMG on the benchmark datasets LUBM. The experiments show that IRSMG significantly outperforms the state-of-the-art algorithms on the CPU. 
+ Type-isomorphism + Type-isomorphism + IRSMG: Accelerating Inexact RDF Subgraph Matching on the GPU + IRSMG: Accelerating Inexact RDF Subgraph Matching on the GPU + Subgraph matching + RDF + + + + GPU + + + Subgraph matching + + GPU + + + + + IRSMG: Accelerating Inexact RDF Subgraph Matching on the GPU + + + + + + + + + + + + + + + + + + + + + + + + + d1822505990c7a62874033c9a382d43f94785308 + + + + + Boris Motik + Boris Motik + + + Boris Motik + + + + + + + + Norwegian University of Science and Technology + + + + + + Norwegian University of Science and Technology + Norwegian University of Science and Technology + + + + + + + + + + + + + + + + + + + + University of Waterloo + + + University of Waterloo + + + University of Waterloo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Damian Bursztyn + + + Damian Bursztyn + + + + Damian Bursztyn + + + 2de4e71b32b777b0d9eff0492cd34ab355bc5edd + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Springer Nature + Springer Nature + + + + + + Springer Nature + + + + + + + + + + + + + + + + + + + + + e8a116fccc0d75e8b1797cade46ff5740e8b20ca + Wenrui Wu + + + + + + + + Wenrui Wu + + Wenrui Wu + + + + + + + + + + + + + Fuji Xerox Co., Ltd + Fuji Xerox Co., Ltd + + + + Fuji Xerox Co., Ltd + + + + + + + Michel Héon + + + + + Michel Héon + + + + + Michel Héon + 9543b2410d183477bd2c859301c944c89bad147d + + + + + + + + + + 0fb8b8ec11b797cb181f04f8c7534a39dd42812c + Erik Mannens + + Erik Mannens + + Erik Mannens + + + + + + + + + + + + + + + + + + + + + + + + Norio Kobayashi + + + 676e9c8a337fe5ec7513036db9e3656eca935258 + + + Norio Kobayashi + + + + + Norio Kobayashi + + + + + Khalil Drira + + ee480c269a9e8628c54b709d359284e19d031fb4 + + + Khalil Drira + + + + + + + + Khalil Drira + + + + Alo Allik + + + + + Alo Allik + + + + Alo Allik + + 004932ce9922e1329c79f54f34944683e8f67189 + + + + + + + + + + + + + + + + + + + + + + Prediction + 
Prediction + + + + + + + Ontology Reasoning + Energy + Energy + + The unprecedented growth in mobile devices, combined with advances in Semantic Web Technologies, has given birth to opportunities for more intelligent systems on-the-go. Limited resources of mobile devices, especially energy, demand approaches to make mobile reasoning more applicable. While Mobile-Cloud integration is a promising method for harnessing the power of semantic technologies in the mobile infrastructure, it is an open question on deciding when to reason with ontologies on mobile devices. In this paper, we introduce an energy consumption prediction mechanism for ontology reasoning on mobile devices, which allows analysis of feasibility of ontology reasoning on mobile devices in terms of energy consumption. The prediction models contributes to mobile-cloud integration and helps improve further development of ontology and semantic solutions in general. + Semantic Web + + Ontology Reasoning + + + Predicting Energy Consumption of Ontology Reasoning over Mobile Devices + + + + Predicting Energy Consumption of Ontology Reasoning over Mobile Devices + Random Forests + The unprecedented growth in mobile devices, combined with advances in Semantic Web Technologies, has given birth to opportunities for more intelligent systems on-the-go. Limited resources of mobile devices, especially energy, demand approaches to make mobile reasoning more applicable. While Mobile-Cloud integration is a promising method for harnessing the power of semantic technologies in the mobile infrastructure, it is an open question on deciding when to reason with ontologies on mobile devices. In this paper, we introduce an energy consumption prediction mechanism for ontology reasoning on mobile devices, which allows analysis of feasibility of ontology reasoning on mobile devices in terms of energy consumption. 
The prediction models contributes to mobile-cloud integration and helps improve further development of ontology and semantic solutions in general. + + Random Forests + Mobile Computing + Semantic Web + + + Predicting Energy Consumption of Ontology Reasoning over Mobile Devices + Mobile Computing + + + + + + bb18daf2c3096e3c06a42caad9d2fe91a4de88ba + + + + + + + + Carlos Rojas + Carlos Rojas + + + + Carlos Rojas + + + + + + + + + + + + + + + + + + + + + + + Matias Junemann + Matias Junemann + + + + + + + + + 7ec0278bcf2e5b6612e93d70e8b059ee3d0bc99e + Matias Junemann + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Olaf Hartig + + + + 654ca904f7c4dfa438e672f559b02ca8c6aaae22 + Olaf Hartig + Olaf Hartig + + + + + + + Qingliang Miao + + eef81157d6c10276a138252a2fd96103174d8080 + + + + + + + Qingliang Miao + + Qingliang Miao + + + + + + + + + + + + + + + 2016-10-19T11:00:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + Interaction + 2016-10-19T11:00:00 + 2016-10-19T12:20:00 + Interaction + + + + 2016-10-19T12:00:00 + Affective Graphs: The Visual Appeal of Linked Data + 2016-10-19T12:20:00 + Suvodeep Mazumdar, Daniela Petrelli, Khadija Elbedweihy, Vitaveska Lanfranchi and Fabio Ciravegna + 2016-10-19T12:20:00 + + + 2016-10-19T12:20:00 + 2016-10-19T12:00:00 + Affective Graphs: The Visual Appeal of Linked Data + 2016-10-19T12:20:00 + + + + Ontologies for Knowledge Graphs: Breaking the Rules + tuple-generating dependencies + + + Ontologies for Knowledge Graphs: Breaking the Rules + Datalog + + Ontologies for Knowledge Graphs: Breaking the Rules + Datalog + finite expansion set + + finite expansion set + functional dependency + bounded treewidth set + + bounded treewidth set + + finite unification set + + + finite unification set + + tuple-generating dependencies + Large-scale knowledge graphs (KGs) abound in industry and academia. 
+They provide a unified format for integrating information sources, +aided by standards such as, e.g., the W3C RDB to RDF Mapping Language. +Meaningful semantic integration, however, is much harder than +syntactic alignment. Ontologies could be an interoperable and +declarative solution to this task. At a closer look, however, we find +that popular ontology languages, such as OWL and Datalog, cannot +express even the most basic relationships on the normalised data +format of KGs. Existential rules are more powerful, but may make +reasoning undecidable, and normalising them to suit KGs can destroy +syntactic restrictions that ensure decidability and low complexity. We +study this issue for several classes of existential rules and derive more +general syntactic criteria to recognise well-behaved rule-based ontologies +over knowledge graphs. + functional dependency + + + + Large-scale knowledge graphs (KGs) abound in industry and academia. +They provide a unified format for integrating information sources, +aided by standards such as, e.g., the W3C RDB to RDF Mapping Language. +Meaningful semantic integration, however, is much harder than +syntactic alignment. Ontologies could be an interoperable and +declarative solution to this task. At a closer look, however, we find +that popular ontology languages, such as OWL and Datalog, cannot +express even the most basic relationships on the normalised data +format of KGs. Existential rules are more powerful, but may make +reasoning undecidable, and normalising them to suit KGs can destroy +syntactic restrictions that ensure decidability and low complexity. We +study this issue for several classes of existential rules and derive more +general syntactic criteria to recognise well-behaved rule-based ontologies +over knowledge graphs. 
+ existential rules + + + existential rules + + + + 1ee386235c89959195075bc4944d5c68f8265f96 + + + + + Emanuele Della Valle + + + + + + + Emanuele Della Valle + Emanuele Della Valle + + + 2016-10-19T18:00:00 + Visualizing Semantic Table Annotations with TableMiner+ + Suvodeep Mazumdar and Ziqi Zhang + Visualizing Semantic Table Annotations with TableMiner+ + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T10:50:00 + RDF2Vec: RDF Graph Embeddings for Data Mining + RDF2Vec: RDF Graph Embeddings for Data Mining + + 2016-10-21T11:10:00 + 2016-10-21T10:50:00 + Petar Ristoski and Heiko Paulheim + 2016-10-21T11:10:00 + 2016-10-21T11:10:00 + 2016-10-21T11:10:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Institute of Information Systems, University of Lübeck + + Institute of Information Systems, University of Lübeck + Institute of Information Systems, University of Lübeck + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Open Knowledge Belgium + + Open Knowledge Belgium + + + + + + Open Knowledge Belgium + + + + + + + + + + + + + + + + + + + + + Fabio Ciravegna + Fabio Ciravegna + + + + Fabio Ciravegna + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Linked Stream Data + Continuous queries + + + + Linked Stream Data + Operator-aware approach for boosting performance in RDF stream processing + Operator-aware approach for boosting performance in RDF stream processing + Linked Data + + Semantic Web + To enable efficiency in stream processing, the evaluation of a query is usually performed over bounded parts of (potentially) unbounded streams, i.e., processing windows “slide” 
over the streams. To avoid inefficient re-evaluations of already evaluated parts of a stream in respect to a query, incremental evaluation strategies are applied, i.e., the query results are obtained incrementally from the result set of the preceding processing state without having to re-evaluate all input buffers. This method is highly efficient but it comes at the cost of having to maintain processing state, which is not trivial, and may defeat performance advantages of the incremental evaluation strategy. In the context of RDF streams the problem is further aggravated by the hard-to-predict evolution of the structure of RDF graphs over time and the application of sub-optimal implementation approaches, e.g., using relational technologies for storing data and processing states which incur significant performance drawbacks for graph-based query patterns. To address these performance problems, this paper proposes a set of novel operator-aware data structures coupled with incremental evaluation algorithms which outperform the counterparts of relational stream processing systems. This claim is demonstrated through extensive experimental results on both simulated and real datasets. + + + Continuous queries + To enable efficiency in stream processing, the evaluation of a query is usually performed over bounded parts of (potentially) unbounded streams, i.e., processing windows “slide” over the streams. To avoid inefficient re-evaluations of already evaluated parts of a stream in respect to a query, incremental evaluation strategies are applied, i.e., the query results are obtained incrementally from the result set of the preceding processing state without having to re-evaluate all input buffers. This method is highly efficient but it comes at the cost of having to maintain processing state, which is not trivial, and may defeat performance advantages of the incremental evaluation strategy. 
In the context of RDF streams the problem is further aggravated by the hard-to-predict evolution of the structure of RDF graphs over time and the application of sub-optimal implementation approaches, e.g., using relational technologies for storing data and processing states which incur significant performance drawbacks for graph-based query patterns. To address these performance problems, this paper proposes a set of novel operator-aware data structures coupled with incremental evaluation algorithms which outperform the counterparts of relational stream processing systems. This claim is demonstrated through extensive experimental results on both simulated and real datasets. + + stream processing + Semantic Web + + Operator-aware approach for boosting performance in RDF stream processing + Linked Data + + stream processing + + + + + + + + + + + + + + + + + + bd5f2180caec276f3e26c02ac360fe6b711f5a11 + + Harald Sack + + + + Harald Sack + Harald Sack + + + + + + + + + + + + + + Monash University + + Monash University + + + + + + Monash University + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 44c9bbaf2d7e02ca4e27b3c5e53c92fe9136949e + Fernando Florenzano + + + + + + Fernando Florenzano + Fernando Florenzano + + + + + + + + + + + + + + + + + + f59443a95f72fede91ee05c4b766aeef3fc4e4a1 + Terue Takatsuki + + + + + + Terue Takatsuki + + Terue Takatsuki + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Max-Planck Institute for Informatics + Max Planck Institute for Informatics + Max Planck Institute for Informatics + + + Max Planck Institute for Informatics + + Max-Planck Institute for Informatics + Max-Planck Institute for Informatics + + + + 2016-10-20T13:30:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 
2016-10-20T13:30:00 + Streams + 2016-10-20T14:50:00 + Streams + + + + + + + + + + + + + + + + + + + + + 2016-10-19T11:00:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + Knowledge Representation + Knowledge Representation + 2016-10-19T11:00:00 + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Fernando Florenzano, Denis Parra, Juan L. Reutter and Freddie Venegas + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + An interactive visualisation for RDF data + An interactive visualisation for RDF data + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Marta Sabou + + + + + Marta Sabou + Marta Sabou + + + + Sebastian Hellmann + + + + + 3b9b030bfa83b9c747d525b7943829d3abc2813b + + + + + + Sebastian Hellmann + + Sebastian Hellmann + + + + + + + + + + + Roman Kontchakov + + + Roman Kontchakov + + + Roman Kontchakov + + + + + + + + + + + + + + + + + + + + + + + + + Linked Open Data + MusicWeb: music discovery with open linked semantic metadata + + This demo presents MusicWeb, a novel platform for linking music artists within a web-based application for discovering associations between them. MusicWeb provides a browsing experience using connections that are either extra-musical or tangential to music, such as the artists' political affiliation or social influence, or intra-musical, such as the artists' main instrument or most favoured musical key. The platform integrates open linked semantic metadata from various Semantic Web, music recommendation and social media data sources. The connections are further supplemented by thematic analysis of journal articles, blog posts and content-based similarity measures focussing on high level musical categories. 
+ + Semantic Web + + + music metadata + + + + + + MusicWeb: music discovery with open linked semantic metadata + + This demo presents MusicWeb, a novel platform for linking music artists within a web-based application for discovering associations between them. MusicWeb provides a browsing experience using connections that are either extra-musical or tangential to music, such as the artists' political affiliation or social influence, or intra-musical, such as the artists' main instrument or most favoured musical key. The platform integrates open linked semantic metadata from various Semantic Web, music recommendation and social media data sources. The connections are further supplemented by thematic analysis of journal articles, blog posts and content-based similarity measures focussing on high level musical categories. + + + Semantic Web + + MusicWeb: music discovery with open linked semantic metadata + + Linked Open Data + semantic audio analysis + + + music metadata + music information retrieval + semantic audio analysis + + music information retrieval + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + OWLAx: A Protege Plugin to Support Ontology Axiomatization through Diagramming + 2016-10-19T18:00:00 + OWLAx: A Protege Plugin to Support Ontology Axiomatization through Diagramming + 2016-10-19T21:00:00 + + Md. Kamruzzaman Sarker, Adila A. Krisnadhi and Pascal Hitzler + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + 89f88f590a47d82f90e022147efdcd85b1e85f21 + Atsuko Yamaguchi + + Atsuko Yamaguchi + + + Atsuko Yamaguchi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + protege plugin + + linked data mining + + + + We present a Protege plugin implementing Swift Linked Data Miner, an anytime algorithm for extending an ontology with new subsumptions. 
The algorithm mines an RDF graph accessible via a SPARQL endpoint and proposes new SubClassOf axioms to the user. + We present a Protege plugin implementing Swift Linked Data Miner, an anytime algorithm for extending an ontology with new subsumptions. The algorithm mines an RDF graph accessible via a SPARQL endpoint and proposes new SubClassOf axioms to the user. + swift linked data miner + A Protege Plugin with Swift Linked Data Miner + + A Protege Plugin with Swift Linked Data Miner + A Protege Plugin with Swift Linked Data Miner + + + + + swift linked data miner + + + linked data mining + protege plugin + + + + + + + + Gentile + + + + Anna Lisa + Anna Lisa Gentile + + + + afc0b2d9d17f63457688ebab6a66bf2913774795 + Anna Lisa Gentile + + + Anna Lisa Gentile + + + + + + + + + + + + + + + + + + + Olivier Curé + + Olivier Curé + + + + + 33cd62b318bd504dde9eea1cdcf0095394e4ba40 + Olivier Curé + + + + + + + Chiara Ghidini + Chiara Ghidini + + + + + + + + Chiara Ghidini + + + + + + + + + + + + + + + + + + Duke University + + Duke University + + + Duke University + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T14:40:00 + Melisachew Wudage Chekol and Giuseppe Pirrò + 2016-10-19T14:20:00 + 2016-10-19T14:20:00 + + 2016-10-19T14:40:00 + Containment of Expressive SPARQL Navigational Queries + 2016-10-19T14:40:00 + + + Containment of Expressive SPARQL Navigational Queries + 2016-10-19T14:40:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Fabio Vitali + + feb8c8e23e4e4d60fc409a35349430509cad92f1 + + Fabio Vitali + + + Fabio Vitali + + + + + + + + + + David Martin + + + + + + + David Martin + + + + aae4343bbb16e012e0912037ff410a13ca36a0be + David Martin + + + + + + + + + + Andrea Maurino + + + + + + Andrea Maurino + + + + + + Andrea Maurino + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + Thi-Nhu Nguyen, Hideaki Takeda, Khai Nguyen, Ryutaro Ichise and Tuan-Dung Cao + Type Prediction for Entities in DBpedia by Aggregating Multilingual Resources + Type Prediction for Entities in DBpedia by Aggregating Multilingual Resources + + 2016-10-19T18:00:00 + + + + + 795f113ba89d73d202c0f85b54b33d95673d9038 + + + Melisachew Wudage Chekol + + + + Melisachew Wudage Chekol + Melisachew Wudage Chekol + + + + + + + Australian Bureau of Statistics / Australian National University + + + Australian Bureau of Statistics / Australian National University + + + + + Australian Bureau of Statistics / Australian National University + + + + + + + + + + + + + + + + + + + 2016-10-21T11:10:00 + Abstract Meaning Representations as Linked Data + + 2016-10-21T11:10:00 + + 2016-10-21T11:10:00 + 2016-10-21T11:10:00 + 2016-10-21T10:50:00 + 2016-10-21T10:50:00 + Gully Burns, Ulf Hermjakob and José Luis Ambite + Abstract Meaning Representations as Linked Data + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Takeshi Morita + Takeshi Morita + + + Takeshi Morita + 1f96d51ea567d4068288c14a41f9e8fbc6892693 + + + + + + + + + Saverio Ieva + + + + + 534d7e9edbed40cf8679d963a295792201da70d7 + Saverio Ieva + Saverio Ieva + + + + + + + + + + + + + + + + + 2016-10-20T13:50:00 + Structuring Linked Data Search Results Using Probabilistic Soft Logic + 2016-10-20T14:10:00 + Duhai Alshukaili, Alvaro A. A. 
Fernandes and Norman Paton + Structuring Linked Data Search Results Using Probabilistic Soft Logic + + 2016-10-20T14:10:00 + + + 2016-10-20T14:10:00 + 2016-10-20T13:50:00 + 2016-10-20T14:10:00 + + + + + + + + + + Christof Mahieu + + Christof Mahieu + + + + + ad4c342fb603f96a6bcd281bf49efae4ad7d9873 + + + Christof Mahieu + + + + + + Ritsumeikan University + + + + + + Ritsumeikan University + + + + Ritsumeikan University + + + + + + + + Thomas Vanhove + b28efc640adbf8d996b9605f01e7699f68c9847d + + + + + + Thomas Vanhove + + Thomas Vanhove + + + + + + + + + + + + + + Software Engineering + Linked data + Enabling combined software and data engineering at Web-scale: The ALIGNED suite of ontologies + + + Software Engineering + Enabling combined software and data engineering at Web-scale: The ALIGNED suite of ontologies + + + + + + Data Engineering + + + + Linked data + + Unified Governance + + Ontologies + + Effective, collaborative integration of software services and big data to develop insightful analytics, for Web-scale systems, is now a crucial techno-economic challenge. This requires new combined data and software engineering processes and tools. Semantic metadata standards such as RDFS and OWL, and linked data principles, provide a technical grounding for such integrated systems given an appropriate model of the domain. In this paper we introduce the ALIGNED suite of ontologies or vocabularies specifically designed to model the information exchange needs of combined software and data engineering processes. The models have been deployed to enable: tool-chain integration, such as the exchange of data quality reports; cross-domain communication, such as interlinked data and software unit testing; mediation of the system design process through the capture of design intents and as a source of context for model-driven software engineering processes. 
These ontologies are deployed in trial live web-scale, data-intensive system development environments in both the commercial and academic domains. We exemplify the usage of the suite on a complex collaborative software and data engineering scenario from the legal information system domain. + + + + Enabling combined software and data engineering at Web-scale: The ALIGNED suite of ontologies + + Effective, collaborative integration of software services and big data to develop insightful analytics, for Web-scale systems, is now a crucial techno-economic challenge. This requires new combined data and software engineering processes and tools. Semantic metadata standards such as RDFS and OWL, and linked data principles, provide a technical grounding for such integrated systems given an appropriate model of the domain. In this paper we introduce the ALIGNED suite of ontologies or vocabularies specifically designed to model the information exchange needs of combined software and data engineering processes. The models have been deployed to enable: tool-chain integration, such as the exchange of data quality reports; cross-domain communication, such as interlinked data and software unit testing; mediation of the system design process through the capture of design intents and as a source of context for model-driven software engineering processes. These ontologies are deployed in trial live web-scale, data-intensive system development environments in both the commercial and academic domains. We exemplify the usage of the suite on a complex collaborative software and data engineering scenario from the legal information system domain. 
+ Ontologies + + Data Engineering + + + Unified Governance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Osaka Prefecture University + Osaka Prefecture University + + Osaka Prefecture University + + + + + + Andrea Giovanni Nuzzolese, Anna Lisa Gentile, Valentina Presutti and Aldo Gangemi + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Generating Conference Linked Open Data in One Click + Generating Conference Linked Open Data in One Click + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + Danh Le-Phuoc + + + + Danh Le-Phuoc + + + + + + Danh Le-Phuoc + + + + + + + + + + + + + + + + + Aesthetics + + + + + + Visual Analytics + + + + + + + Information Visualisation + Aesthetics + Affective Graphs: The Visual Appeal of Linked Data + + The essence and value of Linked Data lies in the ability of humans and machines to query, access and reason upon highly structured and formalised data. Ontology structures provide an unambiguous description of the structure and content of data. While a multitude of software applications and visualization systems have been developed over the past years for Linked Data, there is still a significant gap that exists between applications that consume Linked Data and interfaces that have been designed with significant focus on aesthetics. Though the importance of aesthetics in affecting the usability, effectiveness and acceptability of user interfaces have long been recognised, little or no explicit attention has been paid to the aesthetics of Linked Data applications. In this paper, we introduce a formalised approach to developing aesthetically pleasing semantic web interfaces by following aesthetic principles and guidelines identified from literature. We apply such principles to design and develop a generic approach of using visualizations to support exploration of Linked Data, in an interface that is pleasing to users. 
This provides users with means to browse ontology structures, enriched with statistics of the underlying data, facilitating exploratory activities and enabling visual query for highly precise information needs. We evaluated our approach in three ways: an initial objective evaluation comparing our approach with other well-known interfaces for the semantic web and two user evaluations with semantic web researchers. + + Affective Graphs: The Visual Appeal of Linked Data + + + Affective Graphs: The Visual Appeal of Linked Data + The essence and value of Linked Data lies in the ability of humans and machines to query, access and reason upon highly structured and formalised data. Ontology structures provide an unambiguous description of the structure and content of data. While a multitude of software applications and visualization systems have been developed over the past years for Linked Data, there is still a significant gap that exists between applications that consume Linked Data and interfaces that have been designed with significant focus on aesthetics. Though the importance of aesthetics in affecting the usability, effectiveness and acceptability of user interfaces have long been recognised, little or no explicit attention has been paid to the aesthetics of Linked Data applications. In this paper, we introduce a formalised approach to developing aesthetically pleasing semantic web interfaces by following aesthetic principles and guidelines identified from literature. We apply such principles to design and develop a generic approach of using visualizations to support exploration of Linked Data, in an interface that is pleasing to users. This provides users with means to browse ontology structures, enriched with statistics of the underlying data, facilitating exploratory activities and enabling visual query for highly precise information needs. 
We evaluated our approach in three ways: an initial objective evaluation comparing our approach with other well-known interfaces for the semantic web and two user evaluations with semantic web researchers. + + + + Visual Analytics + Semantic Web + Linked Data + + Semantic Web + Linked Data + Information Visualisation + + + + + + + + + + + + + + + + + + + + + + Anastasia Dimou + + + Anastasia Dimou + + Anastasia Dimou + 6ae5a96a9885e213c64efd5848f4dd5b6dd1c16a + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ddae780bd6b450041ec6eafd76849aebeecbf987 + Catia Pesquita + + + Catia Pesquita + Catia Pesquita + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Visual Exploration + Exploratory search + + + + Knowledge exploration + This paper presents a novel approach to Linked Data exploration that uses Encyclopedic Knowledge Patterns (EKPs) as relevance criteria for selecting, organising, and visualising knowledge. EKP are discovered by mining the linking structure of Wikipedia and evaluated by means of a user-based study, which shows that they are cognitively sound as models for building entity summarisations. We implemented a tool named Aemoo that supports EKP-driven knowledge exploration and integrates data coming from heterogeneous resources, namely static and dynamic knowledge as well as text and Linked Data. Aemoo is evaluated by means of controlled, task-driven user experiments in order to assess its usability, and ability to provide relevant and serendipitous information as compared to two existing tools: Google and RelFinder. + + Visual Exploration + Aemoo: Linked Data exploration based on Knowledge Patterns + + + This paper presents a novel approach to Linked Data exploration that uses Encyclopedic Knowledge Patterns (EKPs) as relevance criteria for selecting, organising, and visualising knowledge. 
EKP are discovered by mining the linking structure of Wikipedia and evaluated by means of a user-based study, which shows that they are cognitively sound as models for building entity summarisations. We implemented a tool named Aemoo that supports EKP-driven knowledge exploration and integrates data coming from heterogeneous resources, namely static and dynamic knowledge as well as text and Linked Data. Aemoo is evaluated by means of controlled, task-driven user experiments in order to assess its usability, and ability to provide relevant and serendipitous information as compared to two existing tools: Google and RelFinder. + + Knowledge Patterns + + + Knowledge Patterns + Analysis of Linked Data + + Aemoo: Linked Data exploration based on Knowledge Patterns + + + + Analysis of Linked Data + Aemoo: Linked Data exploration based on Knowledge Patterns + Exploratory search + + + + + Knowledge exploration + + + Rule-Based Reasoning using State Space Search + 2016-10-19T21:00:00 + + + Rule-Based Reasoning using State Space Search + 2016-10-19T21:00:00 + Dieter De Paepe, Ruben Verborgh, Erik Mannens and Rik Van de Walle + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + + National Technical University of Athens + + + + National Technical University of Athens + + + + + National Technical University of Athens + + + + + + + + + + + + + VISTA GmbH + VISTA GmbH + + VISTA GmbH + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Poznan University of Technology + + Poznan University of Technology + + + + + + Poznan University of Technology + + + + + + + + + Youngkyoung Ham + + + + + + Youngkyoung Ham + + bbd09cfc7eff31c89fc07d652f64a0670beec621 + + + Youngkyoung Ham + + + + + + + + + + + + + + OWL + + + Ontologies + + OBDA + + Databases + SPARQL + Ontologies + Ontop: Answering SPARQL queries over relational databases + + Ontop: Answering SPARQL queries 
over relational databases + Databases + + + OBDA + + + We present Ontop, an open-source Ontology-Based Data Access (OBDA) system that allows for querying relational data sources through a conceptual representation of the domain of interest, provided in terms of an ontology, to which the data sources are mapped. Key features of Ontop are its solid theoretical foundations, a virtual approach to OBDA, which avoids materializing triples and is implemented through the query rewriting technique, extensive optimizations exploiting all elements of the OBDA architecture, its compliance to all relevant W3C recommendations (including SPARQL queries, R2RML mappings, and OWL 2 QL and RDFS ontologies), and its support for all major relational databases. + + + + + + Ontop + + SPARQL + + + OWL + Ontop + + + RDF + + + R2RML + We present Ontop, an open-source Ontology-Based Data Access (OBDA) system that allows for querying relational data sources through a conceptual representation of the domain of interest, provided in terms of an ontology, to which the data sources are mapped. Key features of Ontop are its solid theoretical foundations, a virtual approach to OBDA, which avoids materializing triples and is implemented through the query rewriting technique, extensive optimizations exploiting all elements of the OBDA architecture, its compliance to all relevant W3C recommendations (including SPARQL queries, R2RML mappings, and OWL 2 QL and RDFS ontologies), and its support for all major relational databases. 
+ + + + + R2RML + + Ontop: Answering SPARQL queries over relational databases + + RDF + + + + a241f0cbbef38075d6656af5d98a712b95b4aa00 + Hiroshi Masuya + + + + + + Hiroshi Masuya + + + + Hiroshi Masuya + + + + + + + + + + + + + + + + + + + + Charlotte Jewell + 8be120b54dd3dac2bdce06124b1e30353c832c26 + + + + + + + Charlotte Jewell + + Charlotte Jewell + + + + + + + + + + + + + FarolApp: Live Linked Data on Light Pollution + evolution + FarolApp is a mobile web application that aims to increase the awareness of light pollution by generating illustrative maps for cities and by encouraging citizens and public administrations to provide street light information in an ubiquitous and interactive way using online street views. In addition to the maps, FarolApp builds on existing sources to generate and provide up-to-date data by crowdsourced user annotations. Generated data is available as dereferenceable Linked Data resources in several RDF formats and via a queryable SPARQL endpoint. The demo presented in this paper illustrates how FarolApp maintains continuously evolving Linked Data that reflect the current status of city street light infrastructures and use that data to generate light pollution maps. + linked data + + + + + + + light pollution + + + light pollution + crowdsourcing + evolution + FarolApp: Live Linked Data on Light Pollution + + FarolApp is a mobile web application that aims to increase the awareness of light pollution by generating illustrative maps for cities and by encouraging citizens and public administrations to provide street light information in an ubiquitous and interactive way using online street views. In addition to the maps, FarolApp builds on existing sources to generate and provide up-to-date data by crowdsourced user annotations. Generated data is available as dereferenceable Linked Data resources in several RDF formats and via a queryable SPARQL endpoint. 
The demo presented in this paper illustrates how FarolApp maintains continuously evolving Linked Data that reflect the current status of city street light infrastructures and use that data to generate light pollution maps. + + + + + crowdsourcing + + linked data + + + FarolApp: Live Linked Data on Light Pollution + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Université Paris 13 & STLab (CNR-ISTC) + + + Université Paris 13 & STLab (CNR-ISTC) + + + + Université Paris 13 & STLab (CNR-ISTC) + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Enhancing Rule-based OWL Reasoning on Spark + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Enhancing Rule-based OWL Reasoning on Spark + 2016-10-19T18:00:00 + + Zhihui Liu, Xiaowang Zhang and Zhiyong Feng + + + + + Zhiyong Feng + + + + Zhiyong Feng + Zhiyong Feng + + + de8b98e10e9b1cf660322f919ca0f3e2f75ae736 + + + + + + + + Yavor Nenov + Yavor Nenov + + + + Yavor Nenov + + + + + + + + b4e08b0ac920df0d9f2a84703c4cd08f802fb42a + + + + + Linked Open Vocabularies + + Linked Open Vocabularies (LOV): a gateway to reusable semantic vocabularies on the Web + + LOV + + + + + Linked Open Vocabularies (LOV): a gateway to reusable semantic vocabularies on the Web + Vocabulary catalogue + + + LOV + + + + One of the major barriers to the deployment of Linked Data is the difficulty that data publishers have in determining which vocabularies to use to describe the semantics of data. This system report describes Linked Open Vocabularies (LOV), a high quality catalogue of reusable vocabularies for the description of data on the Web. The LOV initiative gathers and makes visible indicators that have not been previously harvested such as the interconnections between vocabularies, version history along with past and current referent (individual or organization). 
The report details the various components of the system along with some innovations such as the introduction of a property-level boost in the vocabulary search scoring which takes into account the property's type (e.g rdfs:label, dc:comment) associated with a matching literal value. By providing an extensive range of data access methods (full-text search, SPARQL endpoint, API, data dump or UI), the project aims at facilitating the reuse of well-documented vocabularies in the Linked Data ecosystem. The adoption of LOV by many applications and methods shows the importance of such a set of vocabularies and related features for the ontology design activity and the publication of data on the Web. + + Linked Data + + Linked Data + Ontology search + + Ontology search + Vocabulary catalogue + One of the major barriers to the deployment of Linked Data is the difficulty that data publishers have in determining which vocabularies to use to describe the semantics of data. This system report describes Linked Open Vocabularies (LOV), a high quality catalogue of reusable vocabularies for the description of data on the Web. The LOV initiative gathers and makes visible indicators that have not been previously harvested such as the interconnections between vocabularies, version history along with past and current referent (individual or organization). The report details the various components of the system along with some innovations such as the introduction of a property-level boost in the vocabulary search scoring which takes into account the property's type (e.g rdfs:label, dc:comment) associated with a matching literal value. By providing an extensive range of data access methods (full-text search, SPARQL endpoint, API, data dump or UI), the project aims at facilitating the reuse of well-documented vocabularies in the Linked Data ecosystem. 
The adoption of LOV by many applications and methods shows the importance of such a set of vocabularies and related features for the ontology design activity and the publication of data on the Web. + + + + Linked Open Vocabularies + Linked Open Vocabularies (LOV): a gateway to reusable semantic vocabularies on the Web + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Dieter De Paepe + + + + + + + e2216c32b8b5be98e0e4e6a5632c9a869e684107 + + Dieter De Paepe + + Dieter De Paepe + + + + Shen Gao + + + + + Shen Gao + + Shen Gao + + + + + + 3b33725c7633555310bb4084e06053bd6d2be33e + + + + + RDF + SPARQL federation + + + A Fine-Grained Evaluation of SPARQL Endpoint Federation Systems + + + + + + + + The Web of Data has grown enormously over the last years. Currently, it comprises a large compendium of interlinked and distributed datasets from multiple domains. Running complex queries on this compendium often requires accessing data from different endpoints within one query. The abundance of datasets and the need for running complex query has thus motivated a considerable body of work on SPARQL query federation systems, the dedicated means to access data distributed over the Web of Data. However, the granularity of previous evaluations of such systems has not allowed deriving of insights concerning their behavior in different steps involved during federated query processing. In this work, we perform extensive experiments to compare state-of-the-art SPARQL endpoint federation systems using the comprehensive performance evaluation framework FedBench. In addition to considering the tradition query runtime as an evaluation criterion, we extend the scope of our performance evaluation by considering criteria, which have not been paid much attention to in previous studies. In particular, we consider the number of sources selected, the total number of SPARQL ASK requests used, the completeness of answers as well as the source selection time. 
Yet, we show that they have a significant impact on the overall query runtime of existing systems. Moreover, we extend FedBench to mirror a highly distributed data environment and assess the behavior of existing systems by using the same performance criteria. As the result we provide a detailed analysis of the experimental outcomes that reveal novel insights for improving current and future SPARQL federation systems. + The Web of Data has grown enormously over the last years. Currently, it comprises a large compendium of interlinked and distributed datasets from multiple domains. Running complex queries on this compendium often requires accessing data from different endpoints within one query. The abundance of datasets and the need for running complex query has thus motivated a considerable body of work on SPARQL query federation systems, the dedicated means to access data distributed over the Web of Data. However, the granularity of previous evaluations of such systems has not allowed deriving of insights concerning their behavior in different steps involved during federated query processing. In this work, we perform extensive experiments to compare state-of-the-art SPARQL endpoint federation systems using the comprehensive performance evaluation framework FedBench. In addition to considering the tradition query runtime as an evaluation criterion, we extend the scope of our performance evaluation by considering criteria, which have not been paid much attention to in previous studies. In particular, we consider the number of sources selected, the total number of SPARQL ASK requests used, the completeness of answers as well as the source selection time. Yet, we show that they have a significant impact on the overall query runtime of existing systems. Moreover, we extend FedBench to mirror a highly distributed data environment and assess the behavior of existing systems by using the same performance criteria. 
As the result we provide a detailed analysis of the experimental outcomes that reveal novel insights for improving current and future SPARQL federation systems. + + + RDF + + Web of Data + + + + + Web of Data + + A Fine-Grained Evaluation of SPARQL Endpoint Federation Systems + + SPARQL federation + A Fine-Grained Evaluation of SPARQL Endpoint Federation Systems + + + + + + + + + German Rigau + + + + + German Rigau + + + + German Rigau + + + + + + + + + + Radityo Eko Prasojo + + + + + + + Radityo Eko Prasojo + Radityo Eko Prasojo + + 5a968589cad6977d19faf62ea7d325861d9aaeec + + + + Fabien Gandon + + + + + + Fabien Gandon + 583b2ab35d1cef69e21b25a7f36ec5a36e11d31d + + Fabien Gandon + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + Christophe Gravier and Julien Subercaze + USE-RB : Benchmarking how reasoners work in harmony with modern hardware + 2016-10-19T21:00:00 + + + USE-RB : Benchmarking how reasoners work in harmony with modern hardware + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + e556260231af1a25b322d9486bd2fbb20c702ca6 + Fanghuai Hu + + + Fanghuai Hu + + + + + + + Fanghuai Hu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pieter Bonte + a500e551ca8091a80cab79c2204408231496684d + Pieter Bonte + + Pieter Bonte + + + + + + + + + + + + + + + + + + + Parma Nand + Parma Nand + + + 8915a3d2eb4ef06ef9a8120814babc2cb65f9eac + + Parma Nand + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Quality Assessment for Linked Data: A Survey + The development and standardization of semantic web technologies has resulted in an unprecedented volume of data being published on the Web as Linked Data (LD). However, we observe widely varying data quality ranging from extensively curated datasets to crowdsourced and extracted data of relatively low quality. 
In this article, we present the results of a systematic review of approaches for assessing the quality of LD. We gather existing approaches and analyze them qualitatively. In particular, we unify and formalize commonly used terminologies across papers related to data quality and provide a comprehensive list of 18 quality dimensions and 69 metrics. Additionally, we qualitatively analyze the 30 core approaches and 12 tools using a set of attributes. The aim of this article is to provide researchers and data curators a comprehensive understanding of existing work, thereby encouraging further experimentation and development of new approaches focused towards data quality, specifically for LD. + Survey + data quality + + The development and standardization of semantic web technologies has resulted in an unprecedented volume of data being published on the Web as Linked Data (LD). However, we observe widely varying data quality ranging from extensively curated datasets to crowdsourced and extracted data of relatively low quality. In this article, we present the results of a systematic review of approaches for assessing the quality of LD. We gather existing approaches and analyze them qualitatively. In particular, we unify and formalize commonly used terminologies across papers related to data quality and provide a comprehensive list of 18 quality dimensions and 69 metrics. Additionally, we qualitatively analyze the 30 core approaches and 12 tools using a set of attributes. The aim of this article is to provide researchers and data curators a comprehensive understanding of existing work, thereby encouraging further experimentation and development of new approaches focused towards data quality, specifically for LD. 
+ Survey + Quality Assessment for Linked Data: A Survey + + + + + + + + Linked Data + + + + + assessment + + + data quality + Linked Data + + + Quality Assessment for Linked Data: A Survey + assessment + + + + + + + + + + + + + + + + + + + + + + + d121bf16f374de868ec9203902995d9f1a903b8b + + + + + Lara Piccolo + Lara Piccolo + Lara Piccolo + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + Julien Subercaze and Christophe Gravier + Parallel sort-merge-join reasoning + + Parallel sort-merge-join reasoning + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + CNR-ISTC + + + + CNR-ISTC + + CNR-ISTC + + + + + + + Linked Data (in resourceless) Platforms: a mapping for Constrained Application Protocol + + + + + + + + + + This paper proposes a mapping of the Linked Data Platform (LDP) specification for Constrained Application Protocol (CoAP). Main motivation stems from the fact that LDP W3C Recommendation presents resource management primitives for HTTP only. Hence, use cases related to Web of Things scenarios, where HTTP-based communication and infrastructures are unfeasible, are partially neglected. A general translation of LDP-HTTP requests and responses is provided, as well as a fully comprehensive framework for HTTP-to-CoAP proxying. The theoretical work is corroborated by an experimental campaign using the W3C Test Suite for LDP. + + + + Linked Data Platform + CoAP + Semantic Web of Things + + Linked Data Platform + This paper proposes a mapping of the Linked Data Platform (LDP) specification for Constrained Application Protocol (CoAP). Main motivation stems from the fact that LDP W3C Recommendation presents resource management primitives for HTTP only. Hence, use cases related to Web of Things scenarios, where HTTP-based communication and infrastructures are unfeasible, are partially neglected. 
A general translation of LDP-HTTP requests and responses is provided, as well as a fully comprehensive framework for HTTP-to-CoAP proxying. The theoretical work is corroborated by an experimental campaign using the W3C Test Suite for LDP. + + + + + Linked Data (in resourceless) Platforms: a mapping for Constrained Application Protocol + + Semantic Web of Things + + + Linked Data (in resourceless) Platforms: a mapping for Constrained Application Protocol + CoAP + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Natural language processing + + + + Real world data + + Information integration + Knowledge graphs have gained increasing popularity in the past couple of years, thanks to their adoption in everyday search engines. Typically, they consist of fairly static and encyclopedic facts about persons and organizations–e.g. a celebrity’s birth date, occupation and family members–obtained from large repositories such as Freebase or Wikipedia. In this paper, we present a method and tools to automatically build knowledge graphs from news articles. As news articles describe changes in the world through the events they report, we present an approach to create Event-Centric Knowledge Graphs (ECKGs) using state-of-the-art natural language processing and semantic web techniques. Such ECKGs capture long-term developments and histories on hundreds of thousands of entities and are complementary to the static encyclopedic information in traditional knowledge graphs. We describe our event-centric representation schema, the challenges in extracting event information from news, our open source pipeline, and the knowledge graphs we have extracted from four different news corpora: general news (Wikinews), the FIFA world cup, the Global Automotive Industry, and Airbus A380 airplanes. Furthermore, we present an assessment on the accuracy of the pipeline in extracting the triples of the knowledge graphs. 
Moreover, through an event-centered browser and visualization tool we show how approaching information from news in an event-centric manner can increase the user’s understanding of the domain, facilitates the reconstruction of news story lines, and enable to perform exploratory investigation of news hidden facts." + + + + Knowledge graphs have gained increasing popularity in the past couple of years, thanks to their adoption in everyday search engines. Typically, they consist of fairly static and encyclopedic facts about persons and organizations–e.g. a celebrity’s birth date, occupation and family members–obtained from large repositories such as Freebase or Wikipedia. In this paper, we present a method and tools to automatically build knowledge graphs from news articles. As news articles describe changes in the world through the events they report, we present an approach to create Event-Centric Knowledge Graphs (ECKGs) using state-of-the-art natural language processing and semantic web techniques. Such ECKGs capture long-term developments and histories on hundreds of thousands of entities and are complementary to the static encyclopedic information in traditional knowledge graphs. We describe our event-centric representation schema, the challenges in extracting event information from news, our open source pipeline, and the knowledge graphs we have extracted from four different news corpora: general news (Wikinews), the FIFA world cup, the Global Automotive Industry, and Airbus A380 airplanes. Furthermore, we present an assessment on the accuracy of the pipeline in extracting the triples of the knowledge graphs. Moreover, through an event-centered browser and visualization tool we show how approaching information from news in an event-centric manner can increase the user’s understanding of the domain, facilitates the reconstruction of news story lines, and enable to perform exploratory investigation of news hidden facts." 
+ + + Event extraction + + Information integration + Event-centric knowledge + + + + Event-centric knowledge + + + + + + + + Real world data + Building event-centric knowledge graphs from news + + + Natural language processing + + + + Event extraction + + Big data + Building event-centric knowledge graphs from news + + Big data + + + Building event-centric knowledge graphs from news + + + + 360997bfdc91a53cacee965dca5f86d68117543c + + + + + Zhihui Liu + + + + + Zhihui Liu + + Zhihui Liu + + + + 4379fc3d8337cff050550e8527912744d5b7a4ec + + + Guozhu Dong + + + + Guozhu Dong + + + + Guozhu Dong + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Lorraine McNerney + 646f2520c086e0052617e0ac2a8d2bb57df27c2b + + + Lorraine McNerney + + + + + + + + + Lorraine McNerney + + + EOXPLORE UG + + + + + + + EOXPLORE UG + + + + EOXPLORE UG + + + + + + + Ulf Hermjakob + + 7d2b13bdb66ac486307bb2c77f6aaed8235516cd + + Ulf Hermjakob + + + Ulf Hermjakob + + + + + + Stanford University + Stanford University + + + + + + + + + + Stanford University + + + + + + + + domain adaptation + + Domain Adaptation for Ontology Localization + Domain Adaptation for Ontology Localization + statistical machine translation + Ontology localization is the task of adapting an ontology to a different cultural context, and has been identified as an important task in the context of the Multilingual Semantic Web vision. The key task in ontology localization is translating the lexical layer of an ontology, i.e., its labels, into some foreign language. For this task, we hypothesize that the translation quality can be improved by adapting a machine translation system to the domain of the ontology. To this end, we build on the success of existing statistical machine translation (SMT) approaches, and investigate the impact of different domain adaptation techniques on the task. 
In particular, we investigate three techniques: (i) enriching a phrase table by domain-specific translation candidates acquired from existing Web resources, (ii) relying on Explicit Semantic Analysis as an additional technique for scoring a certain translation of a given source phrase, as well as (iii) adaptation of the language model by means of weighting n-grams with scores obtained from topic modelling. We present in detail the impact of each of these three techniques on the task of translating ontology labels. We show that these techniques have a generally positive effect on the quality of translation of the ontology and that, in combination, they provide a significant improvement in quality. + + + Domain Adaptation for Ontology Localization + + + + + + + + ontology localization + + Ontology localization is the task of adapting an ontology to a different cultural context, and has been identified as an important task in the context of the Multilingual Semantic Web vision. The key task in ontology localization is translating the lexical layer of an ontology, i.e., its labels, into some foreign language. For this task, we hypothesize that the translation quality can be improved by adapting a machine translation system to the domain of the ontology. To this end, we build on the success of existing statistical machine translation (SMT) approaches, and investigate the impact of different domain adaptation techniques on the task. In particular, we investigate three techniques: (i) enriching a phrase table by domain-specific translation candidates acquired from existing Web resources, (ii) relying on Explicit Semantic Analysis as an additional technique for scoring a certain translation of a given source phrase, as well as (iii) adaptation of the language model by means of weighting n-grams with scores obtained from topic modelling. We present in detail the impact of each of these three techniques on the task of translating ontology labels. 
We show that these techniques have a generally positive effect on the quality of translation of the ontology and that, in combination, they provide a significant improvement in quality. + statistical machine translation + + + + ontology localization + + + domain adaptation + + + + + + + + + + + Vienna University of Economics and Business - WU Wien + + Vienna University of Economics and Business - WU Wien + + + + + Vienna University of Economics and Business - WU Wien + + + + + + + + + + SPARQL is the W3C standard query language for querying data expressed in the Resource Description Framework (RDF). The increasing amounts of RDF data available raise a major need and research interest in building efficient and scalable distributed SPARQL query evaluators. In this context, we propose and share SPARQLGX: our implementation of a distributed RDF datastore based on Apache Spark. SPARQLGX is designed to leverage existing Hadoop infrastructures for evaluating SPARQL queries. SPARQLGX relies on a translation of SPARQL queries into executable Spark code that adopts evaluation strategies according to (1) the storage method used and (2) statistics on data. We show that SPARQLGX makes it possible to evaluate SPARQL queries on billions of triples distributed across multiple nodes, while providing attractive performance figures. We report on experiments which show how SPARQLGX compares to related state-of-the-art implementations. Using a simple design, SPARQLGX already represents an interesting alternative in several scenarios. We share it as a resource for the further construction of efficient SPARQL evaluators. 
+ + Experimental Validation + + SPARQLGX: Efficient Distributed Evaluation of SPARQL with Apache Spark + SPARQL Evaluator + + + SPARQLGX: Efficient Distributed Evaluation of SPARQL with Apache Spark + SPARQL Evaluator + + + + Distributed RDF Store + + Distributed RDF Store + Experimental Validation + + + + SPARQL is the W3C standard query language for querying data expressed in the Resource Description Framework (RDF). The increasing amounts of RDF data available raise a major need and research interest in building efficient and scalable distributed SPARQL query evaluators. In this context, we propose and share SPARQLGX: our implementation of a distributed RDF datastore based on Apache Spark. SPARQLGX is designed to leverage existing Hadoop infrastructures for evaluating SPARQL queries. SPARQLGX relies on a translation of SPARQL queries into executable Spark code that adopts evaluation strategies according to (1) the storage method used and (2) statistics on data. We show that SPARQLGX makes it possible to evaluate SPARQL queries on billions of triples distributed across multiple nodes, while providing attractive performance figures. We report on experiments which show how SPARQLGX compares to related state-of-the-art implementations. Using a simple design, SPARQLGX already represents an interesting alternative in several scenarios. We share it as a resource for the further construction of efficient SPARQL evaluators. 
+ + SPARQLGX: Efficient Distributed Evaluation of SPARQL with Apache Spark + + + + + + + + + + + + + + + + + + + + + + + + b06220bfcb6dff56ff364afd456f951eefaafeaa + + + + Tomokazu Yoshida + Tomokazu Yoshida + Tomokazu Yoshida + + + + + + + + + + + + + + + + + + + + + + + Information Sciences Institute, University of Southern California + + Information Sciences Institute, University of Southern California + + + + + + + + Information Sciences Institute, University of Southern California + + + + Ricardo Usbeck + + + cf1c5ea391961440c9c6f7b8348d0f920a3cea77 + + Ricardo Usbeck + + + Ricardo Usbeck + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1984133103dbdcc226687d631320a8bc7ca117ac + Aram Galstyan + + + + + + + + Aram Galstyan + + Aram Galstyan + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The Pistoia Alliance Ontologies Mapping project (http://www.pistoiaalliance.org/projects/ontologies-mapping) was set up to find or create better tools or services for mapping between ontologies in the same domain and to establish best practices for ontology management in the Life Sciences. It was proposed through the Pistoia Alliance Ideas Portfolio Platform (IP3: https://www.qmarkets.org/live/pistoia/home) which was selected by the Pistoia Alliance Operations Team for development of a formal business case. +The project has delivered a set of guidelines for best practice which build on existing standards. We show how these guidelines can be used as a "checklist" to support the application and mapping of source ontologies in the disease and phenotype domain. Another important output of this project was to specify the requirements for an Ontologies Mapping Tool. These requirements were used in a preliminary survey that established that such tools already exist which substantially meet them. 
Therefore, we have developed a formal process to define and submit a request for information (RFI) from existing ontologies mapping tool providers to enable their evaluation. This RFI process will be described and we summarise our findings from evaluation of seven ontologies mapping tools from academic and commercial providers. The guidelines and RFI materials are accessible on a public wiki:- https://pistoiaalliance.atlassian.net/wiki/display/PUB/Ontologies+Mapping+Resources. +A critical component of any Ontologies Mapping tool is the embedded ontology matching algorithm. Therefore, the Pistoia Alliance Ontologies Mapping Project is supporting development and evaluation of ontology matching algorithms though sponsorship and organisation of the new Disease and Phenotype track for OAEI 2016, which is also be summarised in this poster. This new track has been organised because currently, mappings between ontologies in a given data domain are mostly curated by bioinformatics and disease experts in academia or industry, who would benefit from automation of their procedures. This could be accomplished through implementation of ontology matching algorithms into their existing workflow environment or investment in an ontologies mapping tool for management of the ontologies mapping life cycle. +Work is in progress by the Ontologies Mapping project is to develop user requirements for an ontologies mapping service. We will conduct a survey of Pistoia Alliance members to understand the need for such a service and whether it should be implemented in future. 
+ Ontologies Guidelines for Best Practice and a Process to Evaluate Existing Ontologies Mapping Tools and Algorithms + + + Mapping + + + + + + + + Ontologies Guidelines for Best Practice and a Process to Evaluate Existing Ontologies Mapping Tools and Algorithms + + Tools + + + + + + + Ontologies Guidelines for Best Practice and a Process to Evaluate Existing Ontologies Mapping Tools and Algorithms + Guidelines + Evaluation + + Tools + + + + Algorithms + + + + Guidelines + + + Ontologies + Mapping + Evaluation + The Pistoia Alliance Ontologies Mapping project (http://www.pistoiaalliance.org/projects/ontologies-mapping) was set up to find or create better tools or services for mapping between ontologies in the same domain and to establish best practices for ontology management in the Life Sciences. It was proposed through the Pistoia Alliance Ideas Portfolio Platform (IP3: https://www.qmarkets.org/live/pistoia/home) which was selected by the Pistoia Alliance Operations Team for development of a formal business case. +The project has delivered a set of guidelines for best practice which build on existing standards. We show how these guidelines can be used as a "checklist" to support the application and mapping of source ontologies in the disease and phenotype domain. Another important output of this project was to specify the requirements for an Ontologies Mapping Tool. These requirements were used in a preliminary survey that established that such tools already exist which substantially meet them. Therefore, we have developed a formal process to define and submit a request for information (RFI) from existing ontologies mapping tool providers to enable their evaluation. This RFI process will be described and we summarise our findings from evaluation of seven ontologies mapping tools from academic and commercial providers. 
The guidelines and RFI materials are accessible on a public wiki:- https://pistoiaalliance.atlassian.net/wiki/display/PUB/Ontologies+Mapping+Resources. +A critical component of any Ontologies Mapping tool is the embedded ontology matching algorithm. Therefore, the Pistoia Alliance Ontologies Mapping Project is supporting development and evaluation of ontology matching algorithms though sponsorship and organisation of the new Disease and Phenotype track for OAEI 2016, which is also be summarised in this poster. This new track has been organised because currently, mappings between ontologies in a given data domain are mostly curated by bioinformatics and disease experts in academia or industry, who would benefit from automation of their procedures. This could be accomplished through implementation of ontology matching algorithms into their existing workflow environment or investment in an ontologies mapping tool for management of the ontologies mapping life cycle. +Work is in progress by the Ontologies Mapping project is to develop user requirements for an ontologies mapping service. We will conduct a survey of Pistoia Alliance members to understand the need for such a service and whether it should be implemented in future. + Algorithms + + + + + Ontologies + + + + + + + + + + + + + + + + + 2016-10-19T12:00:00 + Can you imagine... a language for combinatorial creativity? + 2016-10-19T11:40:00 + + + 2016-10-19T12:00:00 + 2016-10-19T12:00:00 + Fabian M. Suchanek, Colette Menard, Meghyn Bienvenu and Cyril Chapellier + Can you imagine... a language for combinatorial creativity? + + 2016-10-19T11:40:00 + 2016-10-19T12:00:00 + + + + + + + + Fujitsu Ireland + Fujitsu Ireland + Fujitsu Ireland + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + audio effects + + + audio effects + + music production + + + + ontology + + This paper discusses an extension to the Audio Effect Ontology (AUFX-O) for the interdisciplinary classification of audio effect types. 
The ontology extension implements a unified classification system that draws on knowledge from different music-related disciplines and is designed to facilitate the retrieval of audio effect information based on low-level and semantic aspects. It extends AUFX-O enabling communication between agents from different disciplines within the field of music creation and production. After briefly discussing the ontology, we show how it can be used to efficiently classify and retrieve effect types. + music production + Interdisciplinary Classification of Audio Effects in the Audio Effect Ontology + ontology + + semantic web + This paper discusses an extension to the Audio Effect Ontology (AUFX-O) for the interdisciplinary classification of audio effect types. The ontology extension implements a unified classification system that draws on knowledge from different music-related disciplines and is designed to facilitate the retrieval of audio effect information based on low-level and semantic aspects. It extends AUFX-O enabling communication between agents from different disciplines within the field of music creation and production. After briefly discussing the ontology, we show how it can be used to efficiently classify and retrieve effect types. + Interdisciplinary Classification of Audio Effects in the Audio Effect Ontology + + + semantic web + + + + Interdisciplinary Classification of Audio Effects in the Audio Effect Ontology + + + + + + + + + + + + + + + + + state space search + + + + Semantic Web reasoners are powerful tools that allow the extraction of implicit information from RDF data. This information is reachable through the definition of ontologies and/or rules provided to the reasoner. To achieve this, various algorithms are used by different reasoners. In this paper, we explain how state space search can be applied to perform backward-chaining rule-based reasoning. 
State space search is an approach used in the Artificial Intelligence domain that solves problems by modeling them as a graph and searching (using diverse algorithms) for solutions within this graph. State space search offers inherent proof generation and the ability to plug in different search algorithms to determine the characteristics of the reasoner such as: speed, memory or ensuring shortest proof generation. + rule-based reasoning + + + + + Rule-Based Reasoning using State Space Search + reasoning + + + + + + Rule-Based Reasoning using State Space Search + + state space search + Rule-Based Reasoning using State Space Search + Semantic Web reasoners are powerful tools that allow the extraction of implicit information from RDF data. This information is reachable through the definition of ontologies and/or rules provided to the reasoner. To achieve this, various algorithms are used by different reasoners. In this paper, we explain how state space search can be applied to perform backward-chaining rule-based reasoning. State space search is an approach used in the Artificial Intelligence domain that solves problems by modeling them as a graph and searching (using diverse algorithms) for solutions within this graph. State space search offers inherent proof generation and the ability to plug in different search algorithms to determine the characteristics of the reasoner such as: speed, memory or ensuring shortest proof generation. + + reasoning + + + + + + rule-based reasoning + + + + + + Linked Data + Prototypes + + Prototypes + Knowledge Representation + Linked Data + In recent years RDF and OWL have become the most common knowledge representation languages in use on the Web, propelled by the recommendation of the W3C. In this paper we examine an alternative way to represent knowledge based on Prototypes. This Prototype based representation has different properties, which we argue to be +more suitable for data sharing and reuse on the Web. 
Prototypes avoid the distinction between classes and instances and provide means for objects based data sharing and reuse. + +In this paper we discuss the requirements and design principles for Knowledge Representation based on Prototypes on the Web, after which we propose a formal syntax and semantics. We show how to embed knowledge representation based on Prototypes in the current Semantic Web standard stack. An implementation and practical evaluation of the system is presented in a separate resource paper. + + + + + In recent years RDF and OWL have become the most common knowledge representation languages in use on the Web, propelled by the recommendation of the W3C. In this paper we examine an alternative way to represent knowledge based on Prototypes. This Prototype based representation has different properties, which we argue to be +more suitable for data sharing and reuse on the Web. Prototypes avoid the distinction between classes and instances and provide means for objects based data sharing and reuse. + +In this paper we discuss the requirements and design principles for Knowledge Representation based on Prototypes on the Web, after which we propose a formal syntax and semantics. We show how to embed knowledge representation based on Prototypes in the current Semantic Web standard stack. An implementation and practical evaluation of the system is presented in a separate resource paper. 
+ Knowledge Representation on the Web revisited: the Case for Prototypes + + + Knowledge Representation on the Web revisited: the Case for Prototypes + + + + + Knowledge Representation on the Web revisited: the Case for Prototypes + Knowledge Representation + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Texas Health Science Center + + + University of Texas Health Science Center + + + + University of Texas Health Science Center + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Axel-Cyrille Ngonga Ngomo + + + + + Axel-Cyrille Ngonga Ngomo + + Axel-Cyrille Ngonga Ngomo + 3e873fc82e7405de39cb8dc6f2d2c2e445f8c043 + + + + + + + + + + In this paper we describe cLODg2 (conference Linked Open Data generator - version 2), a tool to collect, refine and produce Linked Data about scientific conferences with their associated publications, participants and events. Conference metadata collected from different unstructured and semi-structured resources must be expressed with appropriate vocabularies to be exposed as Linked Data. cLODg2 facilitates this task by providing a one-click workflow to generate data which is ready to be integrated in the ScholarlyData.org dataset. cLODg2 is an open source project, which has the aim to foster the publication of scholarly Linked Open Data and encourage collaborative efforts in this direction between researchers and publishers. + + Generating Conference Linked Open Data in One Click + + + semantic web dog food + linked open data + + + + scholarlydata + + scholarlydata + semantic publishing + + Generating Conference Linked Open Data in One Click + + + + linked open data + + + + semantic publishing + + Generating Conference Linked Open Data in One Click + + semantic web dog food + + In this paper we describe cLODg2 (conference Linked Open Data generator - version 2), a tool to collect, refine and produce Linked Data about scientific conferences with their associated publications, participants and events. 
Conference metadata collected from different unstructured and semi-structured resources must be expressed with appropriate vocabularies to be exposed as Linked Data. cLODg2 facilitates this task by providing a one-click workflow to generate data which is ready to be integrated in the ScholarlyData.org dataset. cLODg2 is an open source project, which has the aim to foster the publication of scholarly Linked Open Data and encourage collaborative efforts in this direction between researchers and publishers. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 6f4d40a62c93d31e98c21f7d17f4caa489dc37cb + Haoxuan Li + + + Haoxuan Li + + + Haoxuan Li + + + Adrian Soto + + + 24fd857befa33b9941e2165d18316451b90c9a03 + Adrian Soto + + + + + + + + + + Adrian Soto + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Free University of Bozen-Bolzano + Free University of Bozen-Bolzano + + + + + + Free University of Bozen-Bolzano + + + + + + + + + + + + + + + + + + + + + + + + + Rule-based reasoning + OWL + + Spark + The rule-based OWL reasoning is to compute the deductive +closure of an ontology by applying RDF/RDFS and OWL entailment +rules. In this paper, we present an approach to enhancing the perfor- +mance of the rule-based OWL reasoning on Spark based on a locally +optimal executable strategy. Firstly, we divide all rules (27 in total) in- +to four main classes, namely, SPO rules (5 rules), type rules (7 rules), +sameAs rules (7 rules), and schema rules (8 rules) since, as we investi- +gated, those triples corresponding to the rst three classes of rules are +overwhelming (e.g., over 99% in the LUBM dataset) in our practical +world. 
Secondly, based on the interdependence among those entailment +rules in each class, we pick out an optimal rule executable order of each +class and then combine them into a new rule execution order of all rules. +Finally, we implement the new rule execution order on Spark. The exper- +imental results show that the running time of our approach is improved +by about 30% as compared to Kim & Park's algorithm (2015). + + + + + Enhancing Rule-based OWL Reasoning on Spark + The rule-based OWL reasoning is to compute the deductive +closure of an ontology by applying RDF/RDFS and OWL entailment +rules. In this paper, we present an approach to enhancing the perfor- +mance of the rule-based OWL reasoning on Spark based on a locally +optimal executable strategy. Firstly, we divide all rules (27 in total) in- +to four main classes, namely, SPO rules (5 rules), type rules (7 rules), +sameAs rules (7 rules), and schema rules (8 rules) since, as we investi- +gated, those triples corresponding to the rst three classes of rules are +overwhelming (e.g., over 99% in the LUBM dataset) in our practical +world. Secondly, based on the interdependence among those entailment +rules in each class, we pick out an optimal rule executable order of each +class and then combine them into a new rule execution order of all rules. +Finally, we implement the new rule execution order on Spark. The exper- +imental results show that the running time of our approach is improved +by about 30% as compared to Kim & Park's algorithm (2015). 
+ Semantic Web + + Enhancing Rule-based OWL Reasoning on Spark + + Rule-based reasoning + Spark + + + + OWL + Semantic Web + + Enhancing Rule-based OWL Reasoning on Spark + + + + + + + + + 2016-10-20T14:10:00 + Ivan Ermilov, Jens Lehmann, Michael Martin and Sören Auer + 2016-10-20T14:30:00 + 2016-10-20T14:30:00 + LODStats: The Data Web Census Dataset + + LODStats: The Data Web Census Dataset + 2016-10-20T14:30:00 + 2016-10-20T14:30:00 + 2016-10-20T14:10:00 + + + + + + + + + + Christian Neuenstadt + Christian Neuenstadt + 385bc1cfaff8363dcc699a5101b7a4730cc5adfb + + + Christian Neuenstadt + + + + + + + + + + + + + + + + + + Siemens AG, Corporate Technology + Siemens AG, Corporate Technology + + Siemens AG, Corporate Technology + + + + + + + + + + + + Sejin Chun, Jooik Jung, Xiongnan Jin, Seungjun Yoon and Kyong-Ho Lee + Proactive Replication of Dynamic Linked Data for Scalable RDF Stream Processing + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + Proactive Replication of Dynamic Linked Data for Scalable RDF Stream Processing + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 209af6a5da064a4a0f0cb89a336bfeb0ebcc196d + + Rik Van de Walle + + + + + + + + + Rik Van de Walle + Rik Van de Walle + + + + + + + + + + 2016-10-20T16:30:00 + 2016-10-20T16:50:00 + Faceted search over RDF-based knowledge graphs + + Faceted search over RDF-based knowledge graphs + 2016-10-20T16:30:00 + + Marcelo Arenas, Bernardo Cuenca Grau, Evgeny Kharlamov, Šarūnas Marciuška and Dmitriy Zheleznyakov + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + + + + + + + + + + + + + + + + + + + + + + Yolanda Gil + Yolanda Gil + + Yolanda Gil + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Scalable Semantic Access 
to Siemens Static and Streaming Distributed Data + + Scalable Semantic Access to Siemens Static and Streaming Distributed Data + Evgeny Kharlamov, Sebastian Brandt, Martin Giese, Ernesto Jimenez-Ruiz, Yannis Kotidis, Steffen Lamparter, Theofilos Mailis, Christian Neuenstadt, Özgür Lütfü Özcep, Christoph Pinkel, Ahmet Soylu, Christoforos Svingos, Dmitriy Zheleznyakov, Ian Horrocks, Yannis Ioannidis, Ralf Möller and Arild Waaler + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + Bernardo Cuenca Grau + + + Bernardo Cuenca Grau + + + + + + + c0879a5783f8750335b2d2830dd7dbb99dc8f94b + + + Bernardo Cuenca Grau + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T14:50:00 + Ontop of Geospatial Databases + 2016-10-21T14:50:00 + Ontop of Geospatial Databases + 2016-10-21T14:30:00 + + + 2016-10-21T14:30:00 + + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + Konstantina Bereta and Manolis Koubarakis + + + + Mohamed H. Gad-Elrab + + + + + + + + + 9a4c2982ecb311ef12921f43c6bd3be430914cee + + Mohamed H. Gad-Elrab + + Mohamed H. 
Gad-Elrab + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + KRDB Research Centre, Free University of Bozen-Bolzano + + + + KRDB Research Centre, Free University of Bozen-Bolzano + KRDB Research Centre, Free University of Bozen-Bolzano + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + LAAS-CNRS / IRIT + + + + + + LAAS-CNRS / IRIT + + + LAAS-CNRS / IRIT + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Guozheng Rao + + + + + 7a4a3f9540990bfe00519e0906bd4802070cde5e + Guozheng Rao + Guozheng Rao + + + + + Smart Trip Alternatives for the Curious + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Damien Graux, Pierre Geneves and Nabil Layaida + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + 2016-10-19T21:00:00 + Smart Trip Alternatives for the Curious + + + + + + + + + Mathias Van Compernolle + + + + + + + 1c03751ebd8cd99eeb57e8fcb853771646e3134d + + Mathias Van Compernolle + Mathias Van Compernolle + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + VU University Amsterdam + VU University Amsterdam + + + + + + + + VU University Amsterdam + + + + + + + + + + + + + + Christophe Debruyne + + + ed3783609bbe08e801b87ecc375c11b6e5ae765b + + Christophe Debruyne + + + + + + + + Christophe Debruyne + + + + + + + + 443d701dde45605cf194b2de164b01e17b72f473 + Katja Hose + Katja Hose + + + + + + Katja Hose + + + + + + Semantic labeling: A domain-independent approach + + Semantic labeling is the process of mapping attributes in data sources to classes in an ontology and is a necessary step in heterogeneous data integration. Variations in data formats, attribute names and even ranges of values of data make this a very challenging task. 
In this paper, we present a novel domain-independent approach to automatic semantic labeling that uses machine learning techniques. Previous approaches use machine learning to learn a model that extracts features related to the data of a domain, which requires the model to be re-trained for every new domain. Our solution uses similarity metrics as features to compare against labeled domain data and learns a matching function to infer the correct semantic labels for data. Since our approach depends on the learned similarity metrics but not the data itself, it is domain-independent and only needs to be trained once to work effectively across multiple domains. In our evaluation, our approach achieves higher accuracy than other approaches, even when the learned models are trained on domains other than the test domain. + + + semantic labeling + + + + + + Semantic labeling: A domain-independent approach + data integration + + semantic labeling + semantic web + + Semantic labeling: A domain-independent approach + + + Semantic labeling is the process of mapping attributes in data sources to classes in an ontology and is a necessary step in heterogeneous data integration. Variations in data formats, attribute names and even ranges of values of data make this a very challenging task. In this paper, we present a novel domain-independent approach to automatic semantic labeling that uses machine learning techniques. Previous approaches use machine learning to learn a model that extracts features related to the data of a domain, which requires the model to be re-trained for every new domain. Our solution uses similarity metrics as features to compare against labeled domain data and learns a matching function to infer the correct semantic labels for data. Since our approach depends on the learned similarity metrics but not the data itself, it is domain-independent and only needs to be trained once to work effectively across multiple domains. 
In our evaluation, our approach achieves higher accuracy than other approaches, even when the learned models are trained on domains other than the test domain. + semantic web + + + + + data integration + + + + + + + + + + + + Peter Boncz + Peter Boncz + + + + + + + + + + + + da89467b6c6397e5e8ebac0f8c307fabc54664b4 + Peter Boncz + + + + + + + + + + + + + + + + + + + + + + + + b59ca8493302f1644ba3fcb198a64536c6530d86 + Torben Bach Pedersen + + + + + Torben Bach Pedersen + + Torben Bach Pedersen + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Fujitsu Laboratories Limited + + Fujitsu Laboratories Limited + Fujitsu Laboratories Limited + + + + + + + + + + + + + The University of the Basque Country + + The University of the Basque Country + + + The University of the Basque Country + + + + + + + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + Corentin Jouault, Kazuhisa Seta and Yuki Hayashi + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + SOLS: A Semantically Enriched Learning System Using LOD Based Automatic Question Generation + + 2016-10-19T21:00:00 + SOLS: A Semantically Enriched Learning System Using LOD Based Automatic Question Generation + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Freddy Lecue + + + + Freddy Lecue + Freddy Lecue + + + f1a1055588342958afbd758ff970e26533fd3bdb + + + Matthew Horridge + + + + + + + + + + Matthew Horridge + Matthew Horridge + + + + + + + + + + + + + + + + + + + + + + + + + + Khai Nguyen and Ryutaro Ichise + Ranking Feature for Classifier-based Instance Matching + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + Ranking Feature for Classifier-based Instance Matching + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + Andrea Giovanni Nuzzolese, Anna Lisa Gentile, Valentina Presutti and Aldo Gangemi + 2016-10-20T10:30:00 + Conference Linked Data: the ScholarlyData project + + 2016-10-20T10:50:00 + 2016-10-20T10:50:00 + + 
2016-10-20T10:50:00 + Conference Linked Data: the ScholarlyData project + 2016-10-20T10:30:00 + + 2016-10-20T10:50:00 + + + Maria Poveda + + + + + + Maria Poveda + + Maria Poveda + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Tabea Tietz, Jörg Waitelonis, Joscha Jäger and Harald Sack + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + refer: a Linked Data based Text Annotation and Recommender System for Wordpress + + refer: a Linked Data based Text Annotation and Recommender System for Wordpress + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Jens Lehmann + + + + + Jens Lehmann + 01fee219e665ecea3905f361517b2bd4a344975d + Jens Lehmann + + + + + + + + + + + + + Bernard Vatant + + + + Bernard Vatant + + + Bernard Vatant + + + + + 2016-10-21T10:30:00 + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T10:30:00 + Lightning Talks + Lightning Talks + + + + + + + + + + + + + + + + + + + + + + + + + + Majid Ghasemi-Gol + + + Majid Ghasemi-Gol + + + + + + + + Majid Ghasemi-Gol + e1a8526e1bf49d3bf707a897cc345675dcc0e946 + + + + + + + + + + + + Yonsei University + + + + + + + Yonsei University + + Yonsei University + + + + + + Ghislain Auguste Atemezing + + + Ghislain Auguste Atemezing + Ghislain Auguste Atemezing + + + + + 9971b68740871b230036efe3e33193acd84e2a53 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Luigi Asprino + c775678ec000c80a483a4c2fc69f34d6a62b6281 + + Luigi Asprino + Luigi Asprino + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Abraham Bernstein + + Abraham Bernstein + 8704ad77580618cb845036d3a15626d30fd828c3 + + + + + Abraham Bernstein + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + yovisto GmbH + + + + yovisto GmbH + + + yovisto GmbH + + + + 2016-10-20T11:50:00 + 2016-10-20T11:30:00 + Wei Hu, Haoxuan Li, Zequn Sun, Xinqi Qian, Lingkun Xue, Ermei Cao and Yuzhong Qu + + Clinga: Bringing Chinese Physical and Human Geography in Linked Open Data + 2016-10-20T11:50:00 + + + 2016-10-20T11:30:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + Clinga: Bringing Chinese Physical and Human Geography in Linked Open Data + + + + + + + + + + + + + + + + + + Daniela Petrelli + + + + + + + + + + Daniela Petrelli + Daniela Petrelli + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Building Evidence Graph for Clinical Decision Support + Building Evidence Graph for Clinical Decision Support + + 2016-10-19T21:00:00 + Jing Mei + + + + + + + + + + + + + + + 2016-10-21T13:30:00 + + + 2016-10-21T13:50:00 + Enabling combined software and data engineering at Web-scale: The ALIGNED suite of ontologies + Monika Solanki, Bojan Božić, Markus Freudenberg, Rob Brennan and Dimitris Kontokostas + 2016-10-21T13:50:00 + + 2016-10-21T13:50:00 + 2016-10-21T13:50:00 + Enabling combined software and data engineering at Web-scale: The ALIGNED suite of ontologies + 2016-10-21T13:30:00 + + + + + + + + + + + + + + Ildikó Szabó + + + + + + Ildikó Szabó + + + + + 5005aaa611b454785502e9c6b2d2eaf35376beba + Ildikó Szabó + + + + + + + + + + + + + + + + + Martin J. Kollingbaum + + + + 4b5792c4b473dff25831c0ee0f172d4ec595d14d + + + + + + Martin J. Kollingbaum + Martin J. 
Kollingbaum + + + 2016-10-20T11:10:00 + + FOOD: FOod in Open Data + 2016-10-20T11:10:00 + 2016-10-20T10:50:00 + + + Silvio Peroni, Giorgia Lodi, Luigi Asprino, Aldo Gangemi and Valentina Presutti + 2016-10-20T11:10:00 + 2016-10-20T10:50:00 + FOOD: FOod in Open Data + 2016-10-20T11:10:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Declan O'Sullivan + + + + Declan O'Sullivan + + 936a0a2881af653ae0c38aae27793dc67e354d0b + + + + Declan O'Sullivan + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Šarūnas Marciuška + + + + + + + Sarunas Marciuska + + Sarunas Marciuska + Šarūnas Marciuška + fa769f08236892f18aca15816b35e8d9c1c5598b + + Šarūnas Marciuška + Sarunas Marciuska + + + + + + + + + + + + + + + + + + + Enzo Zerega + + Enzo Zerega + + + + + 5df1567ef3dc6392bf8b14e33442048b648d81eb + + + + + + Enzo Zerega + + + + University of Sheffield + + + + + + + University of Sheffield + + + + University of Sheffield + + + Michael Cochez + + + + + + + Michael Cochez + + Michael Cochez + 4b0bd3e29e5d93684250c5dad31c5915396903e4 + + + + + + + + + + As our computers embed more cores, efficient reasoners are designed with parallelization but also CPU and memory friendliness in mind. % +These latter contribute to make reasoner tractable in practice despite the computational complexity of logical fragments. % +However, creating benchmark to monitor this CPU-friendliness for many reasoners, datasets and logical fragments is a tedious task. % +In this paper, we present the Université Saint-Etienne Reasoners Benchmark (USE-RB) that automates the setup and execution of reasoners benchmarks with a particular attention to monitor how +reasoners work in harmony with the CPU. 
+ + + + benchmark + Reasoning + + memory + + + performance + + caches + + + + benchmark + USE-RB : Benchmarking how reasoners work in harmony with modern hardware + USE-RB : Benchmarking how reasoners work in harmony with modern hardware + + + + performance + + memory + As our computers embed more cores, efficient reasoners are designed with parallelization but also CPU and memory friendliness in mind. % +These latter contribute to make reasoner tractable in practice despite the computational complexity of logical fragments. % +However, creating benchmark to monitor this CPU-friendliness for many reasoners, datasets and logical fragments is a tedious task. % +In this paper, we present the Université Saint-Etienne Reasoners Benchmark (USE-RB) that automates the setup and execution of reasoners benchmarks with a particular attention to monitor how +reasoners work in harmony with the CPU. + + + + caches + Reasoning + USE-RB : Benchmarking how reasoners work in harmony with modern hardware + + + + + + + + + 5644932825980e4bc88c96181c827e9c8f7fef62 + + + Mohamed Gaha + Mohamed Gaha + + + + Mohamed Gaha + + + + + + + + + University of Manchester + + University of Manchester + + + University of Manchester + + + + + + + Kyong-Ho Lee + + + Kyong-Ho Lee + + + + 6b7b2e4d1dd79a0d7b30e138da96ba558a4c952a + + Kyong-Ho Lee + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Athens + + + University of Athens + + + + University of Athens + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Parallel sort-merge-join reasoning + Jena + in-memory reasoner + Jena + + + + + open-source + + + RDFSPlus + + + We present an in-memory, cross-platform, parallel reasoner +for RDFS and RDFSPlus . Inferray uses carefully optimized hash-based +join and sorting algorithms to perform parallel materialization. 
Designed +to take advantage of the architecture of modern CPUs, Inferray exhibits +a very good uses of cache and memory bandwidth. It offers state-of-the- +art performance on RDFS materialization, outperforms its counterparts +on RDFSPlus and can be connected with Jena. +Reasons to see the poster: i) Presentation of the system, how to use +it; ii) Discussion about implementation, source code walkthrough. + RDFSPlus + + + high-performance + + + + open-source + + high-performance + + Parallel sort-merge-join reasoning + We present an in-memory, cross-platform, parallel reasoner +for RDFS and RDFSPlus . Inferray uses carefully optimized hash-based +join and sorting algorithms to perform parallel materialization. Designed +to take advantage of the architecture of modern CPUs, Inferray exhibits +a very good uses of cache and memory bandwidth. It offers state-of-the- +art performance on RDFS materialization, outperforms its counterparts +on RDFSPlus and can be connected with Jena. +Reasons to see the poster: i) Presentation of the system, how to use +it; ii) Discussion about implementation, source code walkthrough. 
+ in-memory reasoner + Parallel sort-merge-join reasoning + + + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + Paramita Mirza, Simon Razniewski and Werner Nutt + Expanding Wikidata's Parenthood Information by 178%, or How To Mine Relation Cardinality Information + 2016-10-19T21:00:00 + Expanding Wikidata's Parenthood Information by 178%, or How To Mine Relation Cardinality Information + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Jaroslaw Bak + + + Jaroslaw Bak + + b43584a6470f93690d641a57586dd3d7e213feb5 + + + + + + + + Jaroslaw Bak + + + + Gully Burns + + + Gully Burns + Gully Burns + + + + + cd43f244307564f1f20d95e655e08fc11614fa45 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Yannis Ioannidis + 6a0e45f72065338cc49ba2804c37502f7a51ad1b + + + + + + Yannis Ioannidis + + Yannis Ioannidis + + + + + + + + + + + + + + + + + + + + + + + + Piek Vossen + + + + + + + + + + + Piek Vossen + Piek Vossen + + + + + + + + + + + University of Stuttgart + University of Stuttgart + + + + + + + + + University of Stuttgart + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + User-Friendly and Scalable Platform for the Design of Intelligent IoT Services: a Smart Office Use Case + User-Friendly and Scalable Platform for the Design of Intelligent IoT Services: a Smart Office Use Case + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Femke Ongenae, Pieter Bonte, Jelle Nelis, Thomas Vanhove and Filip De Turck + + + + + LinkGen: Multipurpose Linked Data Generator + + rdf + data generator + + power-law distribution + rdf + data generator + linked data + + + + The paper presents a synthetic linked data generator that can generate a large amount of RDF data based on certain statistical distribution. 
Data generation is platform independent, supports streaming mode and produces output in N-Triples and N-Quad format. Different sets of output can be generated using various configuration parameters and the outputs are reproducible. Unlike existing generators, our generator accepts any vocabulary and can supplement the output with noisy and inconsistent data. The generator has an option to inter-link instances with real ones provided that the user supplies entities from real datasets. + + + power-law distribution + linked data + + + + LinkGen: Multipurpose Linked Data Generator + + LinkGen: Multipurpose Linked Data Generator + + + + + The paper presents a synthetic linked data generator that can generate a large amount of RDF data based on certain statistical distribution. Data generation is platform independent, supports streaming mode and produces output in N-Triples and N-Quad format. Different sets of output can be generated using various configuration parameters and the outputs are reproducible. Unlike existing generators, our generator accepts any vocabulary and can supplement the output with noisy and inconsistent data. The generator has an option to inter-link instances with real ones provided that the user supplies entities from real datasets. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + John P. McCrae + + + + + + John P. Mccrae + + 4afa4da7b0a6c64c6f7dc841cc0fa67d130fbb06 + + John P. McCrae + + John P. McCrae + + John P. Mccrae + + John P. 
Mccrae + + + + + + + + + + Télécom ParisTech + + + Télécom ParisTech + Télécom ParisTech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Athens University of Economics and Business + + Athens University of Economics and Business + + + + + Athens University of Economics and Business + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Towards an Interface for User-Friendly Linked Data Generation Administration + + + Anastasia Dimou, Pieter Heyvaert, Wouter Maroy, Laurens De Graeve, Ruben Verborgh, Erik Mannens and Rik Van de Walle + + Towards an Interface for User-Friendly Linked Data Generation Administration + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + IBM + + + + + + IBM + + IBM + + + + + Ernesto Jiménez-Ruiz + + Ernesto Jimenez-Ruiz + + Ernesto Jimenez-Ruiz + + Ernesto Jiménez-Ruiz + + + Ernesto Jiménez-Ruiz + + + + + bcd45a988fceba58dbe34a54e7797e89b514dc51 + Ernesto Jimenez-Ruiz + + + + + Yuki Hayashi + + + + + 703abcd0aea51561ecbee2f55af5f23a39a9dfcc + + + Yuki Hayashi + + + Yuki Hayashi + + + + + + + + + Faculty of Computer Science and Media Technology, Norwegian University of Science and Technology + Faculty of Computer Science and Media Technology, Norwegian University of Science and Technology + Faculty of Computer Science and Media Technology, Norwegian University of Science and Technology + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T14:50:00 + 2016-10-21T15:30:00 + 2016-10-21T15:30:00 + 2016-10-21T15:30:00 + 2016-10-21T14:50:00 + Coffee Break + 2016-10-21T15:30:00 + Coffee Break + + + + + + + + + + + + + + + + + + + + + + + + OntoBench: Generating Custom OWL 2 Benchmark Ontologies + 2016-10-21T16:50:00 + OntoBench: Generating Custom OWL 2 Benchmark Ontologies + 2016-10-21T16:50:00 + 2016-10-21T16:30:00 + + + + Vincent Link, Steffen 
Lohmann and Florian Haag + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:30:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Linked Open Vocabularies (LOV): a gateway to reusable semantic vocabularies on the Web + 2016-10-20T16:30:00 + 2016-10-20T16:10:00 + + 2016-10-20T16:30:00 + 2016-10-20T16:30:00 + 2016-10-20T16:30:00 + Pierre-Yves Vandenbussche, Ghislain A. Atemezing, Maria Poveda and Bernard Vatant + Linked Open Vocabularies (LOV): a gateway to reusable semantic vocabularies on the Web + 2016-10-20T16:10:00 + + + DBpedia Mappings Quality Assessment + + + Anastasia Dimou, Dimitris Kontokostas, Markus Freudenberg, Ruben Verborgh, Jens Lehmann, Erik Mannens, Sebastian Hellmann and Rik Van de Walle + + DBpedia Mappings Quality Assessment + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + 2016-10-20T15:50:00 + + + CubeQA—Question Answering on RDF Data Cubes + 2016-10-20T15:30:00 + 2016-10-20T15:50:00 + CubeQA—Question Answering on RDF Data Cubes + Konrad Höffner, Jens Lehmann and Ricardo Usbeck + 2016-10-20T15:30:00 + 2016-10-20T15:50:00 + 2016-10-20T15:50:00 + + + + + + + + + + + + + + + + + David Shotton + David Shotton + David Shotton + + + 2fda526be970b5e393d425de0f4b47a6a4f05850 + + + + + + + + + + + + + + + 2016-10-21T10:00:00 + 2016-10-21T10:30:00 + 2016-10-21T10:30:00 + 2016-10-21T10:30:00 + Coffee Break + 2016-10-21T10:00:00 + 2016-10-21T10:30:00 + Coffee Break + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Makoto Iwayama + + + + Makoto Iwayama + + + Makoto Iwayama + + + + + I. Budak Arpinar + + + + + f83f4591138b6fe67692b5e33a00106e1e96cec8 + + + + + I. Budak Arpinar + + I. 
Budak Arpinar + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Harnessing Crowds and Experts for Semantic Annotation of the Qur'an + Harnessing Crowds and Experts for Semantic Annotation of the Qur'an + Amna Basharat, Khaled Rasheed and I. Budak Arpinar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MSD IT Global Innovation Center + + + + + MSD IT Global Innovation Center + + + + + + MSD IT Global Innovation Center + + + + + + + Walking without a Map: Ranking-Based Traversal for Querying Linked Data + + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + Walking without a Map: Ranking-Based Traversal for Querying Linked Data + + 2016-10-20T16:30:00 + + Olaf Hartig and M. Tamer Ozsu + 2016-10-20T16:30:00 + 2016-10-20T16:30:00 + 2016-10-20T16:30:00 + + + + 2016-10-20T14:50:00 + Quality Assessment for Linked Data: A Survey + 2016-10-20T14:30:00 + Quality Assessment for Linked Data: A Survey + Amrapali Zaveri, Anisa Rula, Andrea Maurino, Ricardo Pietrobon, Jens Lehmann and Sören Auer + 2016-10-20T14:50:00 + 2016-10-20T14:30:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + + + + + + + + + + + + + + + + + + + + Explanation + + + Expenses + Prediction + Reasoning + + Anomaly detection + Prediction + + + + Spend optimization + Reasoning + + + + Travel expenses represent up to 7% of organizations overall budget. Existing expenses systems are designed for reporting expenses types and amount, but not for understanding how to save and spend. We present a system, manipulating semantic web technologies, which aims at identifying, explaining, predicting abnormal expense claims by employees of large organizations in 500+ cities. 
+ + + + Using Semantic Web Technologies for Explaining and Predicting Abnormal Expense + Explanation + Using Semantic Web Technologies for Explaining and Predicting Abnormal Expense + + Using Semantic Web Technologies for Explaining and Predicting Abnormal Expense + Travel expenses represent up to 7% of organizations overall budget. Existing expenses systems are designed for reporting expenses types and amount, but not for understanding how to save and spend. We present a system, manipulating semantic web technologies, which aims at identifying, explaining, predicting abnormal expense claims by employees of large organizations in 500+ cities. + + Anomaly detection + + Expenses + + Spend optimization + + + + + + Domagoj Vrgoc + + + 116719fc41fd2bd28d7bce271756b5eab2c9eda0 + + + Domagoj Vrgoc + + + Domagoj Vrgoc + + + + + + IIT Bombay + IIT Bombay + + + + + + + IIT Bombay + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + Atsuko Yamaguchi, Kouji Kozaki, Kai Lenz, Yasunori Yamamoto, Hiroshi Masuya and Norio Kobayashi + + Data Acquisition by Traversing Class-Class Relationships over the Linked Open Data + 2016-10-19T18:00:00 + Data Acquisition by Traversing Class-Class Relationships over the Linked Open Data + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + ac3a38ea3c73dfd4c5ee1d5d9ad98ede3ffe92d1 + + + + + Seiji Koide + + + + + Seiji Koide + Seiji Koide + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ae61113b7fb9de8879e311a8048a893a5f46ff4f + + + + + + Martin Romacker + Martin Romacker + + + + + + + Martin Romacker + + + Eugenio Di Sciascio + + + Eugenio Di Sciascio + + + + + + + + Eugenio Di Sciascio + 9786472dc7aa4754914460418664e559cef39ca4 + + + + + + + + + + + + + + + + + + + + + + + Erdal Kuzey + + 75b6b3e761989307fe995c636f3dfe3b60be62e6 + + + + + + Erdal Kuzey + + + + + Erdal Kuzey + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + Constructing Semantic 
Networks of Development Activities from Weekly Reports + + Motoyuki Takaai and Yohei Yamane + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + Constructing Semantic Networks of Development Activities from Weekly Reports + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Georgia + + + + University of Georgia + + University of Georgia + + + + 2016-10-18T12:00:00 + Le Tuan Anh + + + Linked Data processing for Embedded Devices + 2016-10-18T12:00:00 + Linked Data processing for Embedded Devices + 2016-10-18T12:00:00 + 2016-10-18T11:45:00 + 2016-10-18T11:45:00 + 2016-10-18T12:00:00 + + + + + + Kalliopi Pafilis + + + Kalliopi Pafilis + + + + + + Kalliopi Pafilis + + + + + + + Peter Patel-Schneider + + Peter Patel-Schneider + + 35a838a13f014e0d3924f7a0aeeb929105fbf234 + + + + + + + + Peter Patel-Schneider + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + e25fa578672a1e755ed7a35fb082e01a69f39f16 + + Rafael S. Gonçalves + + + + Rafael S. Gonçalves + + Rafael S. 
Gonçalves + + + + + + + + + + + Nuance Communications + + + Nuance Communications + + + + + + Nuance Communications + + + + + + + + + + + + + + + + + + Achim Rettinger + + Achim Rettinger + + Achim Rettinger + + c8534f2cda2d909bbb64009ae8156d78764438db + + + + + + + + 2016-10-21T16:10:00 + Giuseppe De Giacomo, Xavier Oriol, Riccardo Rosati and Domenico Fabio Savo + + + 2016-10-21T16:10:00 + 2016-10-21T15:50:00 + 2016-10-21T16:10:00 + + 2016-10-21T16:10:00 + Updating DL-Lite Ontologies through First-Order Queries + 2016-10-21T15:50:00 + Updating DL-Lite Ontologies through First-Order Queries + + + Satoshi Kume, Hiroshi Masuya, Yosky Kataoka and Norio Kobayashi + 2016-10-19T21:00:00 + + Development of an Ontology for an Integrated Image Analysis Platform to enable Global Sharing of Microscopy Imaging Data + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + Development of an Ontology for an Integrated Image Analysis Platform to enable Global Sharing of Microscopy Imaging Data + 2016-10-19T18:00:00 + + + + + KCL London + KCL London + + + KCL London + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T11:20:00 + Knowledge Representation on the Web revisited: the Case for Prototypes + + 2016-10-19T11:20:00 + Michael Cochez, Stefan Decker and Eric Prud'Hommeaux + 2016-10-19T11:00:00 + + 2016-10-19T11:20:00 + 2016-10-19T11:00:00 + Knowledge Representation on the Web revisited: the Case for Prototypes + + 2016-10-19T11:20:00 + + + + + + + + Database Center for Life Science + + + Database Center for Life Science + + Database Center for Life Science + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Oslo + + + + + + University of Oslo + + + University of Oslo + + + + + 686466b4d76673eee98164954a886377cda2251f + + Christian Hennig + + Christian Hennig + + + + + + + + Christian Hennig + + + + + + Database + Database + Taxonomic relations (also known as ``isa'' 
relations or hypernymy relations) represent a fundamental atomic piece of structured information for many text understanding applications. Such structured information is part of the basic topology structure of knowledge bases and foundational ontologies. Despite the availability of shared knowledge bases, some NLP applications (e.g. Ontology Learning) require automatic isa relation harvesting techniques to cope with the coverage of domain-specific and long-tail terms. We present a Web Application to directly query our repository of isa relations extracted from the Common Crawl (the largest publicly available crawl of the Web). Our resource can be also downloaded for research purposes and accessed programmatically (we also release a Java application programming interface). + A Web Application to Search a Large Repository of Taxonomic Relations from the Web + + Natural Language Processing techniques for the Semantic Web + + + Natural Language Processing techniques for the Semantic Web + + + + + + + Information extraction + A Web Application to Search a Large Repository of Taxonomic Relations from the Web + Information extraction + + + A Web Application to Search a Large Repository of Taxonomic Relations from the Web + + + + + + + + Taxonomic relations (also known as ``isa'' relations or hypernymy relations) represent a fundamental atomic piece of structured information for many text understanding applications. Such structured information is part of the basic topology structure of knowledge bases and foundational ontologies. Despite the availability of shared knowledge bases, some NLP applications (e.g. Ontology Learning) require automatic isa relation harvesting techniques to cope with the coverage of domain-specific and long-tail terms. We present a Web Application to directly query our repository of isa relations extracted from the Common Crawl (the largest publicly available crawl of the Web). 
Our resource can be also downloaded for research purposes and accessed programmatically (we also release a Java application programming interface). + + + + + + + + + + + + + + + + + + + + + + + + University of Bradford + + + + University of Bradford + + University of Bradford + + + + + + + + + + University of Oxford + + + + + + University of Oxford + + University of Oxford + + + + + + + + + + + + + + + + + + + + + Syeda Sana E Zainab + + + 4f2a79237677cee7075e7624b8342e939d6f293d + Syeda Sana E Zainab + Syeda Sana E Zainab + + + + + + + + + + + + + + + + + + + + Md. Kamruzzaman Sarker, David Carral, Adila A. Krisnadhi and Pascal Hitzler + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + Modeling OWL with Rules: The ROWL Protege Plugin + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Modeling OWL with Rules: The ROWL Protege Plugin + + + + + + + + + + + + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:30:00 + Extracting Semantic Information for e-Commerce + 2016-10-21T16:50:00 + + Bruno Charron, Yu Hirate, David Purcell and Martin Rezk + + Extracting Semantic Information for e-Commerce + 2016-10-21T16:30:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ryota Nishimura + + + + + + + + + 1c1ea4eff63576d018f18437c07a5e82fbdc2033 + + + + Ryota Nishimura + Ryota Nishimura + + + + + + + 2016-10-19T21:00:00 + Robin Keskisärkkä + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Representing RDF Stream Processing Queries in RSP-SPIN + 2016-10-19T21:00:00 + + Representing RDF Stream Processing Queries in RSP-SPIN + + + Wright State University + + Wright State University + + Wright State University + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + An Ontology based Map Converter for Intelligent Vehicles + 
2016-10-19T18:00:00 + Lihua Zhao, Naoya Arakawa, Hiroaki Wagatsuma and Ryutaro Ichise + 2016-10-19T18:00:00 + + An Ontology based Map Converter for Intelligent Vehicles + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + + + + + + + + + + + + Function + + Data has been made reusable and machine-interpretable by publishing it as Linked Data. +However, Linked Data automatic processing is not fully achieved yet, +as manual effort is still needed to integrate existing tools and libraries within a certain technology stack. +To enable automatic processing, +we propose exposing functions and methods as Linked Data, +publishing it in different programming languages, +using content negotiation to cater to different technology stacks, +and making use of common, technology-independent identifiers to make them discoverable. +As such, we can enable automatic processing of Linked Data across formats and technology stacks. +By using discovery endpoints, similarly as being used to discover vocabularies and ontologies, +the publication of these functions can remain decentralized whilst still be easily discoverable. + Content Negotiation + Discovering and Using Functions via Content Negotiation + + + + Linked Data + + + + + Content Negotiation + + + + + + + Function + + Discovering and Using Functions via Content Negotiation + + + + + + + Data has been made reusable and machine-interpretable by publishing it as Linked Data. +However, Linked Data automatic processing is not fully achieved yet, +as manual effort is still needed to integrate existing tools and libraries within a certain technology stack. +To enable automatic processing, +we propose exposing functions and methods as Linked Data, +publishing it in different programming languages, +using content negotiation to cater to different technology stacks, +and making use of common, technology-independent identifiers to make them discoverable. 
+As such, we can enable automatic processing of Linked Data across formats and technology stacks. +By using discovery endpoints, similarly as being used to discover vocabularies and ontologies, +the publication of these functions can remain decentralized whilst still be easily discoverable. + Linked Data + Discovering and Using Functions via Content Negotiation + + + + + + + + + + 2016-10-21T14:10:00 + 2016-10-21T13:50:00 + + Minh-Duc Pham and Peter Boncz + 2016-10-21T14:10:00 + 2016-10-21T14:10:00 + 2016-10-21T14:10:00 + Exploiting Emergent Schemas to make RDF systems more efficient + 2016-10-21T13:50:00 + Exploiting Emergent Schemas to make RDF systems more efficient + + + + + 2016-10-21T09:00:00 + 2016-10-21T10:00:00 + 2016-10-21T10:00:00 + 2016-10-21T10:00:00 + Keynote: Hiroaki Kitano + 2016-10-21T10:00:00 + Keynote: Hiroaki Kitano + 2016-10-21T09:00:00 + + + + + + + 2016-10-18T12:30:00 + 2016-10-18T14:00:00 + 2016-10-18T14:00:00 + 2016-10-18T14:00:00 + Lunch + 2016-10-18T14:00:00 + Lunch + 2016-10-18T12:30:00 + + + + + + Amna Basharat + + + Amna Basharat + + 9aa3e37c9e5e63bb5aea612c758f39244ccf8c8f + + + + + Amna Basharat + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + Cross-Language Record Linkage using Word Embedding driven Metadata Similarity Measurement + Cross-Language Record Linkage using Word Embedding driven Metadata Similarity Measurement + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Yuting Song, Taisuke Kimura, Biligsaikhan Batjargal and Akira Maeda + + + + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + + Xiang Nan Ren, Olivier Curé, Houda Khrouf, Zakia Kazi-Aoul and Yousra Chabchoub + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Apache Spark and Apache Kafka at the rescue of distributed RDF Stream Processing engines + Apache Spark and Apache Kafka at the rescue of distributed RDF Stream Processing engines + 2016-10-19T18:00:00 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Renzo Angles and Claudio Gutierrez + 2016-10-19T14:20:00 + 2016-10-19T14:20:00 + 2016-10-19T14:00:00 + 2016-10-19T14:20:00 + + + 2016-10-19T14:20:00 + The multiset semantics of SPARQL patterns + The multiset semantics of SPARQL patterns + 2016-10-19T14:00:00 + + + + 2016-10-21T15:30:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + Enriching Data Sources + Enriching Data Sources + 2016-10-21T15:30:00 + 2016-10-21T16:50:00 + + + + + + + + + + + + + + + + + + + + + + + + + + Zequn Sun + + + + f12622c58ed65cf28e8a16eeaf113d623e1d6f51 + + Zequn Sun + + + Zequn Sun + + + + + + Pieter Simoens + + Pieter Simoens + + + 7fa90bcb2351da9bc27562ec0ec4fdf2c83587fa + + Pieter Simoens + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + e45c28f05c0bcb63e65db4b8569498de035baa88 + + + + + + + Hiroaki Wagatsuma + Hiroaki Wagatsuma + + + + Hiroaki Wagatsuma + + + + + + + + + + + + + + + + + + World Wide Web Consortium (W3C) + + + World Wide Web Consortium (W3C) + + + + + World Wide Web Consortium (W3C) + + + + + + Giulio Curioni + 7319659db24dc0b8e33a27d1b3f94bd91ac521af + Giulio Curioni + + + + + + + + + Giulio Curioni + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An Extensible Linear Approach For Holistic Ontology Matching + + Holistic Ontology Matching + + + + + + + + + + Resolving the semantic heterogeneity in the semantic web requires finding correspondences between ontologies describing resources. In particular, with the explosive growth of data sets in the Linked Open Data, linking multiple vocabularies and ontologies simultaneously, known as holistic matching problem, become necessary. Currently, most state-of-the-art matching approaches are limited to pairwise matching. 
In this paper, we propose an approach for holistic ontology matching that is modeled through a linear program extending the maximum-weighted graph matching problem with linear constraints (cardinality, structural, and coherence constraints). Our approach guarantees the optimal solution with mostly coherent alignments. To evaluate our proposal, we discuss the results of experiments performed on the Conference track of the OAEI 2015, under both holistic and pairwise matching settings. + Combinatorial optimisation + + + Linear programming + Combinatorial optimisation + An Extensible Linear Approach For Holistic Ontology Matching + Resolving the semantic heterogeneity in the semantic web requires finding correspondences between ontologies describing resources. In particular, with the explosive growth of data sets in the Linked Open Data, linking multiple vocabularies and ontologies simultaneously, known as holistic matching problem, become necessary. Currently, most state-of-the-art matching approaches are limited to pairwise matching. In this paper, we propose an approach for holistic ontology matching that is modeled through a linear program extending the maximum-weighted graph matching problem with linear constraints (cardinality, structural, and coherence constraints). Our approach guarantees the optimal solution with mostly coherent alignments. To evaluate our proposal, we discuss the results of experiments performed on the Conference track of the OAEI 2015, under both holistic and pairwise matching settings. 
+ An Extensible Linear Approach For Holistic Ontology Matching + + Linear programming + + Holistic Ontology Matching + + + + + + + + + + + + + + + + + + + + + 89ff2466008411464749069a197222abbfec05f3 + + + + + + Pedro Szekely + Pedro Szekely + + Pedro Szekely + + + + + + + + + + + + + + + + + + + + + + + + + Aidan Hogan + b332a58e34e3ddc201c4684233761a53b1d57050 + + + Aidan Hogan + + + + + + + + Aidan Hogan + + + + + SPARQL 1.1 + + + + active learning + + + SPARQL 1.1 + + active learning + An On-Line Learning to Query System + We present an on-line system which learns a SPARQL query from a set of wanted and a set of unwanted results of the query. The sets are extended during a dialog with the user. The system leverages SPARQL 1.1 and does not depend on any particular RDF graph. + An On-Line Learning to Query System + + An On-Line Learning to Query System + learning to query + + + + + + learning to query + We present an on-line system which learns a SPARQL query from a set of wanted and a set of unwanted results of the query. The sets are extended during a dialog with the user. The system leverages SPARQL 1.1 and does not depend on any particular RDF graph. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + Materializing the editing history of Wikipedia as linked Data in DBpedia + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Fabien Gandon + Materializing the editing history of Wikipedia as linked Data in DBpedia + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Cristian Riveros + + + + + + Cristian Riveros + 01d4b6f7e0c1ef0e74365bb317ad450b974f3afe + + + + Cristian Riveros + + + + + + + + + + + + + + + + + + + IRSMG: Accelerating Inexact RDF Subgraph Matching on the GPU + IRSMG: Accelerating Inexact RDF Subgraph Matching on the GPU + 2016-10-19T21:00:00 + Junzhao Zhang, Xiaowang Zhang and Zhiyong Feng + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T11:15:00 + Muhammad Amith + 2016-10-18T11:30:00 + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + 2016-10-18T11:15:00 + 2016-10-18T11:30:00 + 2016-10-18T11:30:00 + + 2016-10-18T11:30:00 + + + + Gisela Klette + + + + + + + + + + e37a1ff2c8b04e94ecc7673ce15103b397fd168e + + Gisela Klette + + Gisela Klette + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T12:00:00 + + + 2016-10-19T12:20:00 + Freddy Brasileiro, Joao Paulo Almeida, Victorio Albani Carvalho and Giancarlo Guizzardi + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + + Expressive Multi-Level Modeling 
for the Semantic Web + 2016-10-19T12:20:00 + 2016-10-19T12:00:00 + Expressive Multi-Level Modeling for the Semantic Web + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Martin G. Skjæveland + + + + + + + + Martin G. Skjæveland + 5223e81829088aa837295fab98f3c286b8f106a2 + + Martin G. Skjæveland + + + + + + + NEL + + DBpedia + refer: a Linked Data based Text Annotation and Recommender System for Wordpress + refer: a Linked Data based Text Annotation and Recommender System for Wordpress + + refer: a Linked Data based Text Annotation and Recommender System for Wordpress + DBpedia + + NEL + + + + visualization + + + + + + When searching for an arbitrary subject in weblogs or archives, users often don’t get the information they are really looking for. Often they are overwhelmed with an overflow of information while sometimes the presented information is too scarce to make any use of it. Without further knowledge about the context or background of the intended subject users are easily frustrated because they either cannot handle the amount of information or they might give up because they cannot make sense of the topic at all. Furthermore, authors of online-platforms often deal with the issue to provide useful recommendations of other articles and to motivate the readers to stay on the platform to explore more of the available but most times hidden content of their blog or archive. +In the demo presentation, we present refer, a semantic annotation and visualization system integrated into the Wordpress platform. With refer, content creators are enabled to (semi-)automatically annotate their texts with DBpedia resources as part of the original writing process and visualize them automatically. With refer users are encouraged to take an active part in discovering a platform’s information content interactively and intuitively, rather than just to have to read the entire textual information provided by the author. 
They can discover background information as well as relationships among persons, places, events, and anything related to the subject in current focus and are inspired to navigate the previously hidden information on a platform. + + When searching for an arbitrary subject in weblogs or archives, users often don’t get the information they are really looking for. Often they are overwhelmed with an overflow of information while sometimes the presented information is too scarce to make any use of it. Without further knowledge about the context or background of the intended subject users are easily frustrated because they either cannot handle the amount of information or they might give up because they cannot make sense of the topic at all. Furthermore, authors of online-platforms often deal with the issue to provide useful recommendations of other articles and to motivate the readers to stay on the platform to explore more of the available but most times hidden content of their blog or archive. +In the demo presentation, we present refer, a semantic annotation and visualization system integrated into the Wordpress platform. With refer, content creators are enabled to (semi-)automatically annotate their texts with DBpedia resources as part of the original writing process and visualize them automatically. With refer users are encouraged to take an active part in discovering a platform’s information content interactively and intuitively, rather than just to have to read the entire textual information provided by the author. They can discover background information as well as relationships among persons, places, events, and anything related to the subject in current focus and are inspired to navigate the previously hidden information on a platform. 
+ + + + + visualization + + + annotation + + annotation + + + + + + + + + + visual knowledge representation + + + + visual modeling + OntoCASE4G-OWL: Towards a modeling software tool for G-OWL a visual syntax for RDF/RDFS/OWL2 + + OWL2 + graphical ontological syntax + OntoCASE4G-OWL: Towards a modeling software tool for G-OWL a visual syntax for RDF/RDFS/OWL2 + + + visual modeling + + + + graphical ontological syntax + graphical language + graphical ontology + ontology + + + graphical language + OntoCASE4G-OWL: Towards a modeling software tool for G-OWL a visual syntax for RDF/RDFS/OWL2 + Ontological syntax standardized by the W3C offer the expressiveness needed in the formulation of complex concepts. However, the codification of an ontology is a process of formalization of thought that sometimes requires extensive knowledge and is often inaccessible in the layperson's logic. The G-OWL (for Graphical OWL) language has been designed to provide a tool to facilitate the expression of knowledge in a manner that is compatible with the OWL2 ontolo-gy. This paper presents the OntoCASE4G-OWL prototype, a visual modeling software for the editing of formal ontologies in G-OWL and their translation into Turtle. The executable version of OntoCASE for Windows and MacOsX is available at http://www.cotechnoe.com/iswc2016 + visual knowledge representation + + graphical ontology + + OWL2 + + ontology + Ontological syntax standardized by the W3C offer the expressiveness needed in the formulation of complex concepts. However, the codification of an ontology is a process of formalization of thought that sometimes requires extensive knowledge and is often inaccessible in the layperson's logic. The G-OWL (for Graphical OWL) language has been designed to provide a tool to facilitate the expression of knowledge in a manner that is compatible with the OWL2 ontolo-gy. 
This paper presents the OntoCASE4G-OWL prototype, a visual modeling software for the editing of formal ontologies in G-OWL and their translation into Turtle. The executable version of OntoCASE for Windows and MacOsX is available at http://www.cotechnoe.com/iswc2016 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Lu Fang + + + 580ad48decd01939ed83f81cd1cc251f075acd16 + + Lu Fang + + + + + + + Lu Fang + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T17:00:00 + 2016-10-21T17:30:00 + 2016-10-21T17:30:00 + 2016-10-21T17:30:00 + Closing Ceremony + 2016-10-21T17:00:00 + Closing Ceremony + 2016-10-21T17:30:00 + + + + + + 1e115fa957774cf5b84116f2a7980e75e9e27e12 + + + Rob Brennan + + + Rob Brennan + + + + Rob Brennan + + + + Charalampos Nikolaou + + + cf10b48a3376e8b7fd4cd5339e1d1c07dd45c385 + + Charalampos Nikolaou + Charalampos Nikolaou + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Shirley Elprama + + Shirley Elprama + + + + Shirley Elprama + + + + + deeb7cdf9dfc63ebf0bb2298c1e0a0291acb6fab + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Lihua Zhao + Lihua Zhao + + + + 6d0e8d810faa0e47a21d7427b741f2582ceff299 + + + Lihua Zhao + + + + + + + + + + Esteban Zimanyi + + + + + + + Esteban Zimanyi + a0fae1dc95e881b71bcb87824953400bcbd6ece9 + Esteban Zimanyi + + + + + + + Robin Keskisärkkä + + + + + + + Robin Keskisärkkä + 3ffe081e1a2621393470ef1ac01821417b32f7d1 + + + Robin Keskisärkkä + + + + + + + Valentina Presutti + 8bd6e0316a77a5ac133d65203c8592d80da602e5 + Presutti + + + + + + + Valentina Presutti + + + Valentina Presutti + Valentina + + + + + + Nandana Mihindukulasooriya + + + bff064a0c7a67911d341d06c62778934f5aac111 + + + + + Nandana Mihindukulasooriya + + + Nandana 
Mihindukulasooriya + + + + + + + + + + + + + + + + + Anthony Potter + 66571c46dfb54fa4abfb80baa667601bcecf5b89 + + + + + Anthony Potter + + + + + Anthony Potter + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Michael Rossman + + + e09aaaadde03c747807284adab837bef936f5936 + Michael Rossman + + + + Michael Rossman + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The Institute of Scientific and Industrial Research (ISIR), Osaka University + + + The Institute of Scientific and Industrial Research (ISIR), Osaka University + + + + + The Institute of Scientific and Industrial Research (ISIR), Osaka University + + + + + + + + + + The University of Manchester + + + + + + The University of Manchester + The University of Manchester + + + + + + Public Administration + Local Affairs + + + + Linked Data + + Local Council Decisions as Linked Data: a proof of concept + + Base registries are trusted authentic information sources controlled by an appointed public administration or organization appointed by the government. Maintaining a base registry comes with extra maintenance costs to create the dataset and keep it up to date. In this paper, we study the possibility to entangle the maintenance of base registries at the core of existing administrative processes and to reduce the cost of maintaining a new data source. We demonstrate a method to manage Local Council Decisions as Linked Data, which creates a new base registry for mandates. We found that no extra effort was needed in the process by local administrations. We show that an end-to-end approach for Local Council Decisions as Linked Data is feasible. Furthermore, using this proof of concept, we established a momentum to roll out these ideas for the region of Flanders in Belgium. 
+ + + Local Council Decisions as Linked Data: a proof of concept + + + Local Affairs + + + Public Administration + + + + Local Council Decisions as Linked Data: a proof of concept + + Digital Publishing + Base registries are trusted authentic information sources controlled by an appointed public administration or organization appointed by the government. Maintaining a base registry comes with extra maintenance costs to create the dataset and keep it up to date. In this paper, we study the possibility to entangle the maintenance of base registries at the core of existing administrative processes and to reduce the cost of maintaining a new data source. We demonstrate a method to manage Local Council Decisions as Linked Data, which creates a new base registry for mandates. We found that no extra effort was needed in the process by local administrations. We show that an end-to-end approach for Local Council Decisions as Linked Data is feasible. Furthermore, using this proof of concept, we established a momentum to roll out these ideas for the region of Flanders in Belgium. + + + Linked Data + + + + + Digital Publishing + + + + + + Linked Corporations Data in Japan + + Based on Open Data Charter of G8, the governments are publishing corporation register data as Open Data. In Japan, the government recently published a dataset covering approximately 4.4 million corporations, but the dataset is rated as 3 star in the 5-star rating system. Our policy, which we believe is also common in the LOD community, is that low-star datasets must be converted into 5 star as early as possible for strengthening the power of LOD. Based on this policy, we designed a schema for corporation data, converted the Japanese dataset into 5 star using this schema, and published this dataset under Creative Commons Attribution 4.0 License on 9th December 2015, only eight days after the publication date of the original dataset. 
As far as we know, eight datasets currently refer to ours, which makes the degree of 5 star stronger. As a business purpose, we internally appended links between our dataset and other data such as DBpedia, and applied this enriched data to a visualization system for browsing a corporation from various perspectives. + + + Based on Open Data Charter of G8, the governments are publishing corporation register data as Open Data. In Japan, the government recently published a dataset covering approximately 4.4 million corporations, but the dataset is rated as 3 star in the 5-star rating system. Our policy, which we believe is also common in the LOD community, is that low-star datasets must be converted into 5 star as early as possible for strengthening the power of LOD. Based on this policy, we designed a schema for corporation data, converted the Japanese dataset into 5 star using this schema, and published this dataset under Creative Commons Attribution 4.0 License on 9th December 2015, only eight days after the publication date of the original dataset. As far as we know, eight datasets currently refer to ours, which makes the degree of 5 star stronger. As a business purpose, we internally appended links between our dataset and other data such as DBpedia, and applied this enriched data to a visualization system for browsing a corporation from various perspectives. 
+ + Linked Open Data + + + + + Linked Corporations Data in Japan + dataset + + + + + Linked Corporations Data in Japan + + + + + LOD4ALL + Linked Open Data + corporations + + + LOD4ALL + + + + + + corporations + dataset + + + Agnieszka Ławrynowicz + + Agnieszka Ławrynowicz + + + Agnieszka Ławrynowicz + d16c46a8e688f893db4910fdeca8a346035e09ae + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 76a9829880fc781bb0d0ce09b671bf79b33d4a3c + + + Helen Reeves + Helen Reeves + Helen Reeves + + + + + Mathieu d'Aquin + + + + + Mathieu d'Aquin + + a73100718bd7c602efaec548fc543acb5aef76d5 + + + + + + Mathieu d'Aquin + + + + + + + + + + + + + + + + + + Pierre-Yves Vandenbussche + + + + + + + Pierre-Yves Vandenbussche + Pierre-Yves Vandenbussche + + + + + 4d190ad7bf441f2c9a4885751c3fa65a85110661 + + + + + + + + + + + + Ricardo Pietrobon + + + Ricardo Pietrobon + Ricardo Pietrobon + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Steffen Lamparter + Steffen Lamparter + + + + + 603f0c9b3231e550cd94e059a2ec26616be599c7 + + Steffen Lamparter + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Juan Manuel Gimeno + Juan Manuel Gimeno + + + + + Juan Manuel Gimeno + + + + + + + + + + + + + Peter Hendler + + + + 3f976a0e014e69a54455d23e4808330fac42a53d + + + + Peter Hendler + Peter Hendler + + + + + + + + + + + + + + + + + + + + + + + + + + + Shusaku Egami + + Shusaku Egami + e2ee4b12fe2f488ecef3c70e41b6045ab97b12d3 + + + + + + + Shusaku Egami + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ontology Translation + + Collaborative Ontology Management Platform + + + + + + + + + + + + Context-based Concept Translation + To enable knowledge access across 
languages, ontologies that are often represented only in English, need to be translated into different languages. +Since manual multilingual enhancement of domain-specific ontologies is very time consuming and expensive, smart solutions are required to facilitate the translation task for the language and domain experts. +For this reason, we present ESSOT, an Expert Supporting System for Ontology Translation, which support experts in accomplishing the multilingual ontology management task. Differently than the classic document translation, ontology label translation faces highly specific vocabulary and lack contextual information. +Therefore, ESSOT takes advantage of the semantic information of the ontology for translation improvement of the ontology labels. + + The ESSOT System Goes Wild: an Easy Way For Translating Ontologies + The ESSOT System Goes Wild: an Easy Way For Translating Ontologies + + To enable knowledge access across languages, ontologies that are often represented only in English, need to be translated into different languages. +Since manual multilingual enhancement of domain-specific ontologies is very time consuming and expensive, smart solutions are required to facilitate the translation task for the language and domain experts. +For this reason, we present ESSOT, an Expert Supporting System for Ontology Translation, which support experts in accomplishing the multilingual ontology management task. Differently than the classic document translation, ontology label translation faces highly specific vocabulary and lack contextual information. +Therefore, ESSOT takes advantage of the semantic information of the ontology for translation improvement of the ontology labels. 
+ + The ESSOT System Goes Wild: an Easy Way For Translating Ontologies + Context-based Concept Translation + Multilingual Ontology Management + + Collaborative Ontology Management Platform + + Multilingual Ontology Management + Ontology Translation + + + + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + SOMM: Industry Oriented Ontology Management Tool + 2016-10-19T21:00:00 + SOMM: Industry Oriented Ontology Management Tool + + 2016-10-19T21:00:00 + Evgeny Kharlamov, Bernardo Cuenca Grau, Ernesto Jimenez-Ruiz, Steffen Lamparter, Gulnar Mehdi, Martin Ringsquandl, Yavor Nenov, Stephan Grimm, Mikhail Roshchin and Ian Horrocks + + + + + + + + + + + + + + + + + + + + + + + + + + + + 96aef08cb0381e842c8367abf32eb3859f4d3e12 + + + Jinhyun Ahn + Jinhyun Ahn + + + Jinhyun Ahn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Aliaksandr Birukou + 43240a6cc1be58ceb19806178c6ed246e3e13713 + + + + + + Aliaksandr Birukou + + + + + + + Aliaksandr Birukou + + + + + Corvinus University of Budapest + Corvinus University of Budapest + + Corvinus University of Budapest + + + + + + + + + + + + + + + Rivindu Perera + Rivindu Perera + 9f521f8d647fafec7a6583fb2ba20e4058b63b57 + + + Rivindu Perera + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Edinburgh + University of Edinburgh + + University of Edinburgh + + + + University of Aberdeen + + University of Aberdeen + + + + + + + University of Aberdeen + + + + + + + + + + + + + + + + + + + + Stefan Dietze + + + + + Stefan Dietze + + + + + + ee6dc5ffc8da2b150fab3da1bcf3d788011c3312 + + + Stefan Dietze + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The multiset semantics of SPARQL patterns + + SPARQL + Relational Algebra + + SPARQL + Relational Algebra + + The paper determines the algebraic and logic 
structure produced by the multiset semantics of the core patterns of SPARQL. We prove that the fragment formed by AND, UNION, OPTIONAL, FILTER, MINUS and SELECT corresponds precisely to both, the intuitive multiset relational algebra (projection, selection, natural join, arithmetic union and except), and multiset classical non-recursive Datalog with safe negation. + + The multiset semantics of SPARQL patterns + Datalog + Bag semantics + + + + The multiset semantics of SPARQL patterns + Bag semantics + + Datalog + + + + + + The paper determines the algebraic and logic structure produced by the multiset semantics of the core patterns of SPARQL. We prove that the fragment formed by AND, UNION, OPTIONAL, FILTER, MINUS and SELECT corresponds precisely to both, the intuitive multiset relational algebra (projection, selection, natural join, arithmetic union and except), and multiset classical non-recursive Datalog with safe negation. + + + + + Saltlux, Inc. + + Saltlux, Inc. + + + + Saltlux, Inc. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Francesco Osborne, Angelo Antonio Salatino, Aliaksandr Birukou and Enrico Motta + Smart Topic Miner: Supporting Springer Nature Editors with Semantic Web Technologies + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + Smart Topic Miner: Supporting Springer Nature Editors with Semantic Web Technologies + 2016-10-19T18:00:00 + + + + + + + + Masaru Miyazaki + Masaru Miyazaki + 405430333261b37dafd78840368a8789072ebd52 + + + + Masaru Miyazaki + + + + + + + + + + + + + + + + Karlsruhe Institute of Technology (KIT) + + + + + + + Karlsruhe Institute of Technology (KIT) + Karlsruhe Institute of Technology (KIT) + + + + + + + + + + + Kerry Taylor + Kerry Taylor + + + + f7a1125fd2e8a1dff398e6023848973a6337e9ab + + + Kerry Taylor + + + + + + + + + + + + + + + + + + + Giorgia Lodi + + + + Giorgia Lodi + + + + Giorgia Lodi + + 376b8d67cc6249aff3f746e002d2660be7cf104d + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + VoldemortKG: Mapping Schema.org Entities to Linked Open Data + 2016-10-19T11:00:00 + 2016-10-19T11:20:00 + + 2016-10-19T11:00:00 + 2016-10-19T11:20:00 + + Alberto Tonon, Djellel Eddine Difallah, Victor Felder and Philippe Cudré-Mauroux + VoldemortKG: Mapping Schema.org Entities to Linked Open Data + 2016-10-19T11:20:00 + + 2016-10-19T11:20:00 + + + 2016-10-19T21:00:00 + What if machines could be creative? + + + 2016-10-19T18:00:00 + Fabian M. Suchanek, Colette Menard, Meghyn Bienvenu and Cyril Chapellier + 2016-10-19T21:00:00 + What if machines could be creative? 
+ 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Flemish Agency for Domestic Governance + Flemish Agency for Domestic Governance + + + Flemish Agency for Domestic Governance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + fd3e838e2512e44a55c1ff356fb5fe97502b2aa4 + + + + + + Yu Hirate + Yu Hirate + Yu Hirate + + + + + + + + + + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Damian Bursztyn, Francois Goasdoue and Ioana Manolescu + 2016-10-19T21:00:00 + Optimizing FOL reducible query answering: understanding performance challenges + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Optimizing FOL reducible query answering: understanding performance challenges + + + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + f38cdca307f19938c201d780003037ae67c1dd4c + Akane Takezaki + + Akane Takezaki + + + Akane Takezaki + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Jacopo Urbani, Ceriel Jacobs and Markus Krötzsch + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + + + + + + Ben De Meester + + Ben De Meester + Ben De Meester + + + + + + + + + ad8b2ae36dc69d174133fd2fd63d13c583f646c3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Southern California + + University of Southern California + + + + + + University of Southern California + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 8b3a38ec6a3728520f921739067cef070b173a9c + + + Patrick Lambrix + Patrick Lambrix + + + Patrick Lambrix + + + + + + Yasunori Yamamoto + 2ab8956fa93aef8534b3bc3d9d540a3f9395c96f + + + Yasunori Yamamoto + + + + Yasunori Yamamoto + + 
+ + + + + + 2016-10-19T21:00:00 + Exploring Linked Classical Music Catalogs with OVERTURE + + Exploring Linked Classical Music Catalogs with OVERTURE + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Pasquale Lisena, Manel Achichi, Eva Fernandez, Konstantin Todorov and Raphaël Troncy + + 2016-10-19T18:00:00 + + + 2016-10-21T10:50:00 + 2016-10-21T10:50:00 + + + Efficient Algorithms for Association Finding and Frequent Association Pattern Mining + 2016-10-21T10:50:00 + + 2016-10-21T10:50:00 + Efficient Algorithms for Association Finding and Frequent Association Pattern Mining + 2016-10-21T10:30:00 + 2016-10-21T10:30:00 + Gong Cheng, Daxin Liu and Yuzhong Qu + + + + + + + + + + + + + + + + Konstantin Todorov + 5c4bd7cfa4123af996e157c664660c68c0237209 + + + + Konstantin Todorov + + + + + Konstantin Todorov + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Yuzu: Publishing Any Data as Linked Data + 2016-10-19T18:00:00 + John P. 
Mccrae + Yuzu: Publishing Any Data as Linked Data + 2016-10-19T21:00:00 + + + + + + + + + + + + + 9da436ca83fe1f6c31b15af200be0fee8e29faca + + + + Miel Vander Sande + + Miel Vander Sande + + + + Miel Vander Sande + + + + + + + + + + + + + + + + + Universidad Técnica Federico Santa María + Universidad Técnica Federico Santa María + Universidad Técnica Federico Santa María + + + + + + + + + + + + 2016-10-20T10:00:00 + 2016-10-20T10:30:00 + 2016-10-20T10:30:00 + 2016-10-20T10:30:00 + 2016-10-20T10:30:00 + Coffee Break + 2016-10-20T10:00:00 + Coffee Break + + + + + + + + + + + + 2016-10-19T21:00:00 + + + SWISH: An Integrated Semantic Web Notebook + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + SWISH: An Integrated Semantic Web Notebook + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Wouter Beek and Jan Wielemaker + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + Tabea Tietz + + + Tabea Tietz + + + Tabea Tietz + + + e0e07bbfbb86dcd6b64b8a0b2d502884c5770bb3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Nanjing University + Nanjing University + + + + + Nanjing University + + + + 2016-10-20T14:50:00 + 2016-10-20T15:30:00 + 2016-10-20T15:30:00 + 2016-10-20T15:30:00 + 2016-10-20T14:50:00 + Coffee Break + 2016-10-20T15:30:00 + Coffee Break + + + An Ontology of Soil Properties and Processes + 2016-10-21T14:10:00 + An Ontology of Soil Properties and Processes + + 2016-10-21T13:50:00 + 2016-10-21T14:10:00 + 2016-10-21T13:50:00 + 2016-10-21T14:10:00 + 2016-10-21T14:10:00 + + Heshan Du, Vania Dimitrova, Derek Magee, Anthony Cohn, Ross Stirling, Giulio Curioni, Barry Clarke and Helen Reeves + + + + + + + + + + + + + + 2016-10-21T11:30:00 + Efstratios Sygkounas, Giuseppe Rizzo and Raphaël Troncy + + 2016-10-21T11:10:00 + 2016-10-21T11:10:00 + A Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis + 2016-10-21T11:30:00 + + + 2016-10-21T11:30:00 + A 
Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis + 2016-10-21T11:30:00 + + + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Konstantina Bereta, Guohui Xiao, Manolis Koubarakis, Martina Hodrius and Conrad Bielski + Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation + Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + PIOTRe: Personal Internet of Things Repository + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Eugene Siow, Thanassis Tiropanis and Wendy Hall + + + PIOTRe: Personal Internet of Things Repository + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + Angelos Charalambidis + 211297be068db0fb4d1d58f1d8168d20ef0a06e2 + Angelos Charalambidis + + + + + + + + + Angelos Charalambidis + + + + ff749127c207b8bc7d463999eea7531e0d7d9aba + + + + + Arild Waaler + Arild Waaler + Arild Waaler + + + + + + + + + + + + + + + + + Cassia Trojahn + + + + acb31a208c24f68142fdeba467938e4621e6d836 + Cassia Trojahn + + + + + + + + Cassia Trojahn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + + relation prediction + + information extraction + + + commonsense knowledge + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + + information extraction + + + While massive volumes of text are now more easily available for knowledge harvesting, many important facts about our everyday world are not expressed in a particularly explicit way. 
To address this, we present WebBrain, a new approach for harvesting commonsense knowledge that relies on joint learning from Web-scale data to fill gaps in the knowledge acquisition. We train a neural network model that not only learns word2vec-style vector representations of words but also commonsense knowledge about them. This joint model allows general semantic information to aid in generalizing beyond the extracted commonsense relationships. Experiments show that we can obtain word embeddings that reflect word meanings, yet also allow us to capture conceptual relationships and commonsense knowledge about them. + + + + + commonsense knowledge + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + relation prediction + While massive volumes of text are now more easily available for knowledge harvesting, many important facts about our everyday world are not expressed in a particularly explicit way. To address this, we present WebBrain, a new approach for harvesting commonsense knowledge that relies on joint learning from Web-scale data to fill gaps in the knowledge acquisition. We train a neural network model that not only learns word2vec-style vector representations of words but also commonsense knowledge about them. This joint model allows general semantic information to aid in generalizing beyond the extracted commonsense relationships. Experiments show that we can obtain word embeddings that reflect word meanings, yet also allow us to capture conceptual relationships and commonsense knowledge about them. 
+ + + + + + + + + + + + + + + + + + 2016-10-21T15:30:00 + 2016-10-21T15:30:00 + A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies + 2016-10-21T15:50:00 + A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies + + 2016-10-21T15:50:00 + 2016-10-21T15:50:00 + 2016-10-21T15:50:00 + David Carral, Cristina Feier and Pascal Hitzler + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + Aemoo: Linked Data exploration based on Knowledge Patterns + + 2016-10-20T15:50:00 + Andrea Giovanni Nuzzolese, Valentina Presutti, Aldo Gangemi, Silvio Peroni and Paolo Ciancarini + 2016-10-20T16:10:00 + Aemoo: Linked Data exploration based on Knowledge Patterns + 2016-10-20T15:50:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Karl Aberer + + + + Karl Aberer + + a9877790616eb28af52fd602e67b0dbeb50f5399 + + + Karl Aberer + + + + + + + + + STIM + STIM + + + + + + + STIM + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + semantic web + + + semantic label + + + + linked data + Mapping data to a shared domain ontology is a key step in publishing semantic content on the Web. Most of the work on automatically mapping structured and semi-structured sources to ontologies focuses on semantic labeling, i.e., annotating data fields with ontology classes and/or properties. However, a precise mapping that fully recovers the intended meaning of the data needs to describe the semantic relations between the data fields too. We present a novel approach to automatically discover the semantic relations within a given data source. We mine the small graph patterns occurring in Linked Open Data and combine them to build a graph that will be used to infer semantic relations. We evaluated our approach on datasets from different domains. 
Mining patterns of maximum length five, our method achieves an average precision of 75% and recall of 77% for a dataset with very complex mappings to the domain ontology, increasing up to 86% and 82%, respectively, for simpler ontologies and mappings. + semantic model + Leveraging Linked Data to Discover Semantic Relations within Data Sources + + semantic relation + Mapping data to a shared domain ontology is a key step in publishing semantic content on the Web. Most of the work on automatically mapping structured and semi-structured sources to ontologies focuses on semantic labeling, i.e., annotating data fields with ontology classes and/or properties. However, a precise mapping that fully recovers the intended meaning of the data needs to describe the semantic relations between the data fields too. We present a novel approach to automatically discover the semantic relations within a given data source. We mine the small graph patterns occurring in Linked Open Data and combine them to build a graph that will be used to infer semantic relations. We evaluated our approach on datasets from different domains. Mining patterns of maximum length five, our method achieves an average precision of 75% and recall of 77% for a dataset with very complex mappings to the domain ontology, increasing up to 86% and 82%, respectively, for simpler ontologies and mappings. 
+ semantic web + + + + Leveraging Linked Data to Discover Semantic Relations within Data Sources + + semantic relation + + + semantic label + + + Leveraging Linked Data to Discover Semantic Relations within Data Sources + linked data + + + semantic model + + + + + + + + + + + + + + + + + + + INRIA / LIX, Ecole Polytechnique + INRIA / LIX, Ecole Polytechnique + + + + + + + + INRIA / LIX, Ecole Polytechnique + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T11:50:00 + 2016-10-21T13:30:00 + 2016-10-21T13:30:00 + 2016-10-21T13:30:00 + 2016-10-21T11:50:00 + Lunch + 2016-10-21T13:30:00 + Lunch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Riichiro Mizoguchi + Riichiro Mizoguchi + + + + + + Riichiro Mizoguchi + + + + + + Yasmin Alam-Faruque + + + + + + + + Yasmin Alam-Faruque + 691961f7af28ec05eb58dfcbb5e0a849b7355a2f + + + Yasmin Alam-Faruque + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Gofran Shukair + + + Gofran Shukair + + Gofran Shukair + + + 917192eb73c5e01779552c969e0fa17aa3b08d14 + + + + + + + + + + + + + + + + + + + + + + + Institut Supérieur d'Electronique de Paris + + Institut Supérieur d'Electronique de Paris + + + + Institut Supérieur d'Electronique de Paris + + + + + + + + + + + + Guillermo Vega-Gorgojo + Guillermo Vega-Gorgojo + + + + + + + + Guillermo Vega-Gorgojo + + + + + + + + + + + + + + + + Vangelis Karkaletsis + + Vangelis Karkaletsis + + + + Vangelis Karkaletsis + + + 7057e10b42006fe77e51f3e67c0ee26c90f94814 + + + + + + 2016-10-19T21:00:00 + Semantic Web Technologies for improving remote visits of museums, using a mobile robot + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + Semantic Web Technologies for improving remote visits of museums, using a mobile robot + 2016-10-19T18:00:00 + Michel Buffa, Catherine Faron Zucker, Thierry Bergeron and Hatim Aouzal + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + 
Department of Informatics, University of Oslo + + + Department of Informatics, University of Oslo + + Department of Informatics, University of Oslo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vincent Link + + + + + Vincent Link + daab7413b065316d22855f8762dcc095c0fe41c2 + + + Vincent Link + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Takahiro Kawamura + + + + 286baaa4c43daf1cc1c760192728f26ca8e529a2 + + Takahiro Kawamura + Takahiro Kawamura + + + + + + + + + + + + + + + + + + + + + + + + + + + + + National Institute of Advanced Industrial Science and Technology (AIST) + National Institute of Advanced Industrial Science and Technology (AIST) + + + National Institute of Advanced Industrial Science and Technology (AIST) + + + ontology + + + + semantic web + + ontology + audio effects + + + + + + + + + + semantic web + audio effects + + This paper introduces the Audio Effects Ontology (AUFX-O) building on previous theoretical models describing audio processing units and workflows in the context of music production. We discuss important conceptualisations of different abstraction layers, their necessity to successfully model audio effects, and their application method. We present use cases concerning the application of effects in music production projects, and the creation of audio effect metadata facilitating a linked data service exposing information about effect implementations. By doing so, we show how our model benefits knowledge sharing, and enables reproducibility and analysis of audio production workflows. 
+ + AUFX-O: Novel Methods for the Representation of Audio Processing Workflows + music production + + music production + AUFX-O: Novel Methods for the Representation of Audio Processing Workflows + + AUFX-O: Novel Methods for the Representation of Audio Processing Workflows + This paper introduces the Audio Effects Ontology (AUFX-O) building on previous theoretical models describing audio processing units and workflows in the context of music production. We discuss important conceptualisations of different abstraction layers, their necessity to successfully model audio effects, and their application method. We present use cases concerning the application of effects in music production projects, and the creation of audio effect metadata facilitating a linked data service exposing information about effect implementations. By doing so, we show how our model benefits knowledge sharing, and enables reproducibility and analysis of audio production workflows. + + + + + + + + + + + + + + + + + + + + + Nobuyuki Igata + + + + 8e617ac525d55754ec759d00990dadc08dc38657 + + Nobuyuki Igata + + Nobuyuki Igata + + + Jan Mendling + + + + 15ada2ef51f9122f07cadca9899ba9e07b35d11b + + + + + + Jan Mendling + Jan Mendling + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Querying Dynamic Datasources with Continuously Mapped Sensor Data + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + Querying Dynamic Datasources with Continuously Mapped Sensor Data + + 2016-10-19T18:00:00 + Ruben Taelman, Pieter Heyvaert, Ruben Verborgh, Erik Mannens and Rik Van de Walle + 2016-10-19T21:00:00 + + + + + + + + + NTT Resonant Inc. + NTT Resonant Inc. + + + + NTT Resonant Inc. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Daniel Gerber + + + + + + Daniel Gerber + + Daniel Gerber + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Seungjun Yoon + Seungjun Yoon + + + + + + + + Seungjun Yoon + + f12185c1fa207ce21260cd547bf7421c33a2194c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Birkbeck, University of London + + + + Birkbeck, University of London + + + + + Birkbeck, University of London + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Juan L. Reutter + + + + + 7fdaf70b3acd2223eb51d604ddffe21f5aab775e + + + Juan L. Reutter + Juan L. Reutter + + + + + + + + + + + + + + + + + + + + + + + + + Alo Allik, György Fazekas and Mark Sandler + 2016-10-21T14:30:00 + 2016-10-21T14:10:00 + Ontological representation of audio features + 2016-10-21T14:30:00 + + + + Ontological representation of audio features + 2016-10-21T14:10:00 + 2016-10-21T14:30:00 + 2016-10-21T14:30:00 + + + + + + + + + + + + 28a0f82609671f47d811e6bee865afb23abfb8db + Enrico Motta + + + Enrico Motta + + + + + Enrico Motta + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T11:40:00 + Visual query interfaces for semantic datasets: an evaluation study + 2016-10-19T11:40:00 + 2016-10-19T12:00:00 + 2016-10-19T12:00:00 + 2016-10-19T12:00:00 + Guillermo Vega-Gorgojo, Laura Slaughter, Martin Giese, Simen Heggestøyl, Ahmet Soylu and Arild Waaler + 2016-10-19T12:00:00 + + Visual query interfaces for semantic datasets: an evaluation study + + + + + + + + + + + + + + Linked Corporations Data in Japan + Shuya Abe, Yutaka Mitsuishi, Shinichiro Tago, Nobuyuki Igata, Seiji Okajima, Hiroaki Morikawa and Fumihito Nishino + 2016-10-19T21:00:00 + + Linked Corporations Data in Japan + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + OWL + The OWL Reasoner 
Evaluation (ORE) Competition is an annual competition (with an associated workshop) which pits OWL 2 compliant reasoners against each other on various standard reasoning tasks over naturally occurring problems. The 2015 competition was the third of its sort and had 14 reasoners competing in six tracks comprising three tasks (consistency, classification, and realisation) over two profiles (OWL 2 DL and EL). In this paper, we outline the design of the competition and present the infrastructure used for its execution: the corpora of ontologies, the competition framework, and the submitted systems. All resources are publicly available on the Web, allowing users to easily re-run the 2015 competition, or reuse any of the ORE infrastructure for reasoner experiments or ontology analysis. + + + + + The OWL Reasoner Evaluation (ORE) 2015 Resources + + The OWL Reasoner Evaluation (ORE) 2015 Resources + + + + + + reasoning + ontologies + reasoning + + + + + The OWL Reasoner Evaluation (ORE) 2015 Resources + + + The OWL Reasoner Evaluation (ORE) Competition is an annual competition (with an associated workshop) which pits OWL 2 compliant reasoners against each other on various standard reasoning tasks over naturally occurring problems. The 2015 competition was the third of its sort and had 14 reasoners competing in six tracks comprising three tasks (consistency, classification, and realisation) over two profiles (OWL 2 DL and EL). In this paper, we outline the design of the competition and present the infrastructure used for its execution: the corpora of ontologies, the competition framework, and the submitted systems. All resources are publicly available on the Web, allowing users to easily re-run the 2015 competition, or reuse any of the ORE infrastructure for reasoner experiments or ontology analysis. 
+ OWL + + ontologies + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ad0c7d68490b84d6c7f8b0cb8aa1e457559386ef + Harith Alani + + Harith Alani + + + Harith Alani + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + NCSR Demokritos + + NCSR Demokritos + + + + + + + NCSR Demokritos + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Linked Sensor Data Generation using Queryable RML Mappings + Linked Sensor Data Generation using Queryable RML Mappings + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Pieter Heyvaert, Ruben Taelman, Ruben Verborgh, Erik Mannens and Rik Van de Walle + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Bonn & Fraunhofer IAIS + + University of Bonn & Fraunhofer IAIS + + University of Bonn & Fraunhofer IAIS + + + + + + + + + 2016-10-20T10:30:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + Querying/SPARQL (I) + 2016-10-20T10:30:00 + Querying/SPARQL (I) + 2016-10-20T11:50:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + Web of Things + + + + Semantic Web of Things + Semantic Web + Semantic Web + + Semantic Web of Things + + + Semantic Web of Things(SWoT) applications focus on providing a wide-scale interoperability that allows the sharing of IoT devices across domains and the reusing of available knowledge on the web. However, the application development is difficult because developers have to do various tasks such as designing an application, annotating IoT data, interpreting data, and combining application domains. + +To address the above challenges, this paper demonstrates SWoTSuite, a toolkit for prototyping SWoT applications. 
It hides the use of semantic web technologies as much as possible to avoid the burden of designing SWoT applications that involves designing ontologies, annotating sensor data, and using reasoning mechanisms to enrich data. Taking inspiration from sharing and reuse approaches, SWoTSuite reuses data and vocabularies. It leverages existing technologies to build applications. We take a hello world naturopathy application as an example and demonstrate an application development process using SWoTSuite. The demo video is available at URL- http://tinyurl.com/zs9flrt. + + + + Software Engineering + SWoTSuite: A Toolkit for Prototyping Cross-domain Semantic Web of Things Applications + + Software Engineering + Programming framework + + Toolkit + Programming framework + + SWoTSuite: A Toolkit for Prototyping Cross-domain Semantic Web of Things Applications + Internet of Things + Web of Things + Internet of Things + + Semantic Web of Things(SWoT) applications focus on providing a wide-scale interoperability that allows the sharing of IoT devices across domains and the reusing of available knowledge on the web. However, the application development is difficult because developers have to do various tasks such as designing an application, annotating IoT data, interpreting data, and combining application domains. + +To address the above challenges, this paper demonstrates SWoTSuite, a toolkit for prototyping SWoT applications. It hides the use of semantic web technologies as much as possible to avoid the burden of designing SWoT applications that involves designing ontologies, annotating sensor data, and using reasoning mechanisms to enrich data. Taking inspiration from sharing and reuse approaches, SWoTSuite reuses data and vocabularies. It leverages existing technologies to build applications. We take a hello world naturopathy application as an example and demonstrate an application development process using SWoTSuite. 
The demo video is available at URL- http://tinyurl.com/zs9flrt. + Toolkit + SWoTSuite: A Toolkit for Prototyping Cross-domain Semantic Web of Things Applications + + + + + + + + + Trinity College Dublin + Trinity College Dublin + + + + + Trinity College Dublin + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Naoya Arakawa + Naoya Arakawa + Naoya Arakawa + + + + 40648dec21e68f3aeb68f6bde134daeaf72cc9e0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pieter Colpaert + Pieter Colpaert + + + + + 5d1ef2cf8bfda01a622cac02ab7620013f11211b + + Pieter Colpaert + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + The ESSOT System Goes Wild: an Easy Way For Translating Ontologies + Mihael Arcan, Mauro Dragoni and Paul Buitelaar + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + The ESSOT System Goes Wild: an Easy Way For Translating Ontologies + + 2016-10-19T21:00:00 + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + University of Lisbon + + + + + University of Lisbon + + + University of Lisbon + + + + + + + + Amrapali Zaveri + + + Amrapali Zaveri + + + + + Amrapali Zaveri + + + + + + + + + + + + + + + + + + + + + + + + RDF stream + JSON-LD + + TripleWave: Spreading RDF Streams on the Web + + software + TripleWave: Spreading RDF Streams on the Web + RDF stream processing + RDF stream publishing + Processing data streams is increasingly gaining a momentum, given the need to process these flows of information in real time and at Web scale. +In this context, RDF Stream Processing (RSP) and Stream Reasoning (SR) have emerged as solutions to combine semantic technologies with stream and event processing techniques. +Research in these areas has proposed an ecosystem of solutions to query, reason and perform real time processing over heterogeneous and distributed data streams on the Web. 
+However, so far one basic building block has been missing: a mechanism to disseminate and exchange RDF streams on the Web. +In this work we close this gap, proposing TripleWave, a reusable and generic tool that enables the publication of RDF streams on the Web. +The features of TripleWave have been derived from requirements of real use-cases, and consider a diverse set of scenarios, independent of any specific RSP implementation. +TripleWave can be fed with existing Web streams (e.g. Twitter and Wikipedia streams) or time-annotated RDF datasets (e.g. the LinkedSensorData set), and it can be invoked through both pull- and push-based mechanisms, thus also enabling RSP engines to automatically register and receive data from TripleWave. + + + + + Processing data streams is increasingly gaining a momentum, given the need to process these flows of information in real time and at Web scale. +In this context, RDF Stream Processing (RSP) and Stream Reasoning (SR) have emerged as solutions to combine semantic technologies with stream and event processing techniques. +Research in these areas has proposed an ecosystem of solutions to query, reason and perform real time processing over heterogeneous and distributed data streams on the Web. +However, so far one basic building block has been missing: a mechanism to disseminate and exchange RDF streams on the Web. +In this work we close this gap, proposing TripleWave, a reusable and generic tool that enables the publication of RDF streams on the Web. +The features of TripleWave have been derived from requirements of real use-cases, and consider a diverse set of scenarios, independent of any specific RSP implementation. +TripleWave can be fed with existing Web streams (e.g. Twitter and Wikipedia streams) or time-annotated RDF datasets (e.g. the LinkedSensorData set), and it can be invoked through both pull- and push-based mechanisms, thus also enabling RSP engines to automatically register and receive data from TripleWave. 
+ RDF stream + + + RDF stream publishing + + TripleWave: Spreading RDF Streams on the Web + + + + open source + software + open source + + + + JSON-LD + + + + + + + + + RDF stream processing + + + + + + + + + + + Ceriel Jacobs + + + Ceriel Jacobs + + + + + + + 38f4f77799ce0a5571dd6b38290a3ba890df635d + Ceriel Jacobs + + + + + + + + + + + + John Domingue + + + + John Domingue + John Domingue + + + + + + + + + Fernando Serena + + + c11398fd7784387762f82da84dbc40348f621dfb + Fernando Serena + + + Fernando Serena + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An Extensible Linear Approach For Holistic Ontology Matching + 2016-10-20T11:10:00 + 2016-10-20T11:10:00 + Imen Megdiche, Olivier Teste and Cassia Trojahn + + 2016-10-20T11:10:00 + 2016-10-20T11:10:00 + 2016-10-20T10:50:00 + + An Extensible Linear Approach For Holistic Ontology Matching + + 2016-10-20T10:50:00 + + + + + + + + + + + + + + + SPARQL + + + + + + + Gize: A Time Warp in the Web of Data + LTL + Historical Data + + SPARQL + We introduce the Gize framework for querying historical RDF data. Gize builds upon two main pillars: a lightweight approach to keep historical data, and an extension of SPARQL called SPARQ–LTL, which incorporates temporal logic primitives to enable a rich class of queries. One striking point of Gize is that its features can be readily made available in existing query processors. + + + LTL + + + + + Gize: A Time Warp in the Web of Data + + Historical Data + We introduce the Gize framework for querying historical RDF data. Gize builds upon two main pillars: a lightweight approach to keep historical data, and an extension of SPARQL called SPARQ–LTL, which incorporates temporal logic primitives to enable a rich class of queries. One striking point of Gize is that its features can be readily made available in existing query processors. 
+ Gize: A Time Warp in the Web of Data + + + + + + + + + + + + + + + + + + + + + + + + + + Seiji Suzuki + Seiji Suzuki + + + + Seiji Suzuki + + 4d5496dd9ba58c00736d43ab97d07d1840a3d1bc + + + + + + + + + + + + Alexander Viehl + b2039d2835c2c32540de3602bea26276fe98ef19 + + + Alexander Viehl + Alexander Viehl + + + + + + + + + + + + reification + + graph databases + + + + relational databases + + Querying Wikidata: Comparing SPARQL, Relational and Graph Databases + Querying Wikidata: Comparing SPARQL, Relational and Graph Databases + + Querying Wikidata: Comparing SPARQL, Relational and Graph Databases + + + + property graphs + + graph databases + In this paper, we experimentally compare the efficiency of various database engines for the purposes of querying the Wikidata knowledge-base, which can be conceptualised as a directed edge-labelled where edges can be annotated with meta-information called qualifiers. We select two popular SPARQL databases (Virtuoso, Blazegraph), a popular relational database (PostgreSQL), and a popular graph database (Neo4J) for comparison and discuss various options as to how Wikidata can be represented in the models of each engine. We design a set of experiments to test the relative query performance of these representations in the context of their respective engines. We first execute a large set of atomic lookups to establish a baseline performance for each test setting, and subsequently perform experiments on instances of more complex graph patterns based on real-world examples. We conclude with a summary of the strengths and limitations of the engines observed. + property graphs + + + + relational databases + + wikidata + reification + + In this paper, we experimentally compare the efficiency of various database engines for the purposes of querying the Wikidata knowledge-base, which can be conceptualised as a directed edge-labelled where edges can be annotated with meta-information called qualifiers. 
We select two popular SPARQL databases (Virtuoso, Blazegraph), a popular relational database (PostgreSQL), and a popular graph database (Neo4J) for comparison and discuss various options as to how Wikidata can be represented in the models of each engine. We design a set of experiments to test the relative query performance of these representations in the context of their respective engines. We first execute a large set of atomic lookups to establish a baseline performance for each test setting, and subsequently perform experiments on instances of more complex graph patterns based on real-world examples. We conclude with a summary of the strengths and limitations of the engines observed. + + + + sparql + sparql + + wikidata + + + + + + + + + + + + + + + + + Linköping University + + Linköping University + Linköping University + + + + + + + + + + + + + + + + + + + + + + + + + Amit Sheth + + c903202d3919813029e4dc56efbe0a2b2443074c + + + + + Amit Sheth + + + Amit Sheth + + + + + + + + + Practical Algorithms for Ontology Update + + + + + + Instance-level Ontology Update + + Instance-level Ontology Update + + OWL 2 QL + + In this paper we study instance-level update in DL-LiteA, the description logic underlying the OWL 2 QL standard. In particular we focus on formula based approaches to ABox insertion and deletion. We show that DL-LiteA, which is well known for enjoying first-order rewritability of query answering, enjoys a first-order rewritability property also for updates. That is, every update can be reformulated into a set of insertion and deletion instructions computable through a non-recursive DATALOG program. Such a program is readily translatable into a first-order query over the ABox considered as a database, and hence into SQL. Exploiting this result we implement an update component for DL-LiteA-based systems and perform some experiments showing that the approach works in practice. 
+ DL-Lite + DL-Lite + Updating DL-Lite Ontologies through First-Order Queries + + + + OWL 2 QL + + + Updating DL-Lite Ontologies through First-Order Queries + + Updating DL-Lite Ontologies through First-Order Queries + + Practical Algorithms for Ontology Update + In this paper we study instance-level update in DL-LiteA, the description logic underlying the OWL 2 QL standard. In particular we focus on formula based approaches to ABox insertion and deletion. We show that DL-LiteA, which is well known for enjoying first-order rewritability of query answering, enjoys a first-order rewritability property also for updates. That is, every update can be reformulated into a set of insertion and deletion instructions computable through a non-recursive DATALOG program. Such a program is readily translatable into a first-order query over the ABox considered as a database, and hence into SQL. Exploiting this result we implement an update component for DL-LiteA-based systems and perform some experiments showing that the approach works in practice. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Olivier Teste + Olivier Teste + + + + 1c582f2ab56f6e966f4ef04c9b738e8c01964229 + Olivier Teste + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Apache Spark + + + SPARQLGX in Action: Efficient Distributed Evaluation of SPARQL with Apache Spark + + + RDF data + + + + + SPARQL evaluators + + We demonstrate SPARQLGX: our implementation of a distributed sparql evaluator. We show that SPARQLGX makes it possible to evaluate SPARQL queries on billions of triples distributed across multiple nodes, while providing attractive performance figures. + + SPARQLGX in Action: Efficient Distributed Evaluation of SPARQL with Apache Spark + Distributed systems + + + + RDF data + + + + SPARQL evaluators + Apache Spark + We demonstrate SPARQLGX: our implementation of a distributed sparql evaluator. 
We show that SPARQLGX makes it possible to evaluate SPARQL queries on billions of triples distributed across multiple nodes, while providing attractive performance figures. + + SPARQLGX in Action: Efficient Distributed Evaluation of SPARQL with Apache Spark + + + Distributed systems + + + + Roberto Garcia, Rosa Gil, Juan Manuel Gimeno, Eirik Bakke and David Karger + + + 2016-10-19T11:20:00 + Benchmarking End-User Structured Data Search and Exploration + 2016-10-19T11:40:00 + 2016-10-19T11:40:00 + 2016-10-19T11:40:00 + Benchmarking End-User Structured Data Search and Exploration + 2016-10-19T11:40:00 + 2016-10-19T11:20:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + Ontology Concept Search + + + + Ranking + Diversification + + Indexing + Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies + + Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies + + Diversification + Finding relevant concepts from a corpus of ontologies is useful in many scenarios, including document classification, web page annotation, and automatic ontology population. Millions of concepts are contained in a large number of ontologies across diverse domains. SPARQL-based query demands knowledge of the structure of ontologies and the query language, whereas more user-friendly, simple keyword-based approaches suffer from false positives as concept descriptions in ontologies may be ambiguous and overlapping. In this paper, we propose a keyword-based concept search framework that (1) exploits the structure and semantics in ontologies, by constructing contexts for each concept; (2) generates the interpretations of a query; and (3) balances relevance and diversity of search results. A comprehensive evaluation against both the domain-specific BioPortal and the general-purpose Falcons on widely-used performance metrics demonstrates that our system outperforms both. 
+ Finding relevant concepts from a corpus of ontologies is useful in many scenarios, including document classification, web page annotation, and automatic ontology population. Millions of concepts are contained in a large number of ontologies across diverse domains. SPARQL-based query demands knowledge of the structure of ontologies and the query language, whereas more user-friendly, simple keyword-based approaches suffer from false positives as concept descriptions in ontologies may be ambiguous and overlapping. In this paper, we propose a keyword-based concept search framework that (1) exploits the structure and semantics in ontologies, by constructing contexts for each concept; (2) generates the interpretations of a query; and (3) balances relevance and diversity of search results. A comprehensive evaluation against both the domain-specific BioPortal and the general-purpose Falcons on widely-used performance metrics demonstrates that our system outperforms both. + + + Ranking + + + Indexing + + + + Ontology Concept Search + + + Query Interpretation + Query Interpretation + Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies + + + + + Dag Hovland + + + + + + Dag Hovland + + Dag Hovland + + 30627422b3fe20465030ad86165544107272257c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Open Information Extraction + + + Who-Does-What: A knowledge base of people's occupations and job activities + + + knowledge base induction from text + Knowledge acquisition + + + + + System analysis and design is concerned with the creation of conceptual models. In this paper, we introduce a novel resource called "Who-Does-What" (WDW) that supports the creation and quality assurance of such models. WDW provides a knowledge base of activities for classes of people engaged in a wide range of different occupations. 
The resource is semi-automatically created by populating the manually-created Standard Occupational Classification (SOC) of the US Department of Labor with activities found on the Web. + + Knowledge acquisition + + + Open Information Extraction + + + System analysis and design is concerned with the creation of conceptual models. In this paper, we introduce a novel resource called "Who-Does-What" (WDW) that supports the creation and quality assurance of such models. WDW provides a knowledge base of activities for classes of people engaged in a wide range of different occupations. The resource is semi-automatically created by populating the manually-created Standard Occupational Classification (SOC) of the US Department of Labor with activities found on the Web. + + Who-Does-What: A knowledge base of people's occupations and job activities + knowledge base induction from text + + Who-Does-What: A knowledge base of people's occupations and job activities + + + + + + + + Bojan Božić + + + + Bojan Božić + + Bojan Božić + + 4fe69f25d9e2dfe3d9dc889a8ec901079b875e3b + + + + + + + + + + + + + + David Karger + + + + + + + David Karger + + + David Karger + + + + + + + + + + + + + + + Satoshi Kume + Satoshi Kume + + + + + + + + + + + f9fb89aaaa81fb8c66880b4929cb250c8857638b + Satoshi Kume + + + + + + + + + + Alasdair Gray + Alasdair Gray + + + + + Alasdair Gray + + + + + + + + + + + + + + + 2016-10-18T14:45:00 + + Towards a distributed, scalable and real-time RDF Stream Processing engine + 2016-10-18T15:00:00 + Towards a distributed, scalable and real-time RDF Stream Processing engine + + 2016-10-18T15:00:00 + 2016-10-18T14:45:00 + 2016-10-18T15:00:00 + Xiangnan Ren + 2016-10-18T15:00:00 + + + + + + + Expanding Wikidata's Parenthood Information by 178%, or How To Mine Relation Cardinality Information + + + While automated knowledge base construction so far has largely focused on fully qualified facts, e.g. 
<Obama, hasChild, Malia>, the Web contains also extensive amounts of cardinality information, such as that someone has two children without giving their names. In this paper we argue that the extraction of such information could substantially increase the scope of knowledge bases. For the sample of the hasChild relation in Wikidata, we show that simple regular-expression based extraction from Wikipedia can increase the size of the relation by 178. We also show how such cardinality information can be used to estimate the recall of knowledge bases. + + Incomplete information + Completeness estimation + + + Expanding Wikidata's Parenthood Information by 178%, or How To Mine Relation Cardinality Information + Knowledge bases + + + + Information extraction + Completeness estimation + + Information extraction + Incomplete information + + + Knowledge bases + + Expanding Wikidata's Parenthood Information by 178%, or How To Mine Relation Cardinality Information + While automated knowledge base construction so far has largely focused on fully qualified facts, e.g. <Obama, hasChild, Malia>, the Web contains also extensive amounts of cardinality information, such as that someone has two children without giving their names. In this paper we argue that the extraction of such information could substantially increase the scope of knowledge bases. For the sample of the hasChild relation in Wikidata, we show that simple regular-expression based extraction from Wikipedia can increase the size of the relation by 178. We also show how such cardinality information can be used to estimate the recall of knowledge bases. + + + + + + + + + + + + + + + + + + + + + + Despite developments of Semantic Web-enabling technologies, +the gap between non-expert end-users and the Semantic Web still +exists. In the field of semantic content authoring, tools for interacting +with semantic content remain directed at highly trained individuals. 
This +adds to the challenges of bringing user-generated content into the Semantic +Web. +In this paper, we present Seed, short for Semantic Editor, an extensible +knowledge-supported natural language text composition tool, which +targets non-experienced end-users enabling automatic as well as semiautomatic +creation of standards based semantically annotated textual +content. We point out the structure of Seed, compare it with related +work and explain how it utilizes Linked Open Data and state of the art +Natural Language Processing to realize user-friendly generation of textual +content for the Semantic Web. We also present experimental evaluation +results involving a diverse group of more than 120 participants, +which showed that Seed helped end-users easily create and interact with +semantic content with nearly no prerequisite knowledge. + Seed, an End-user Text Composition Tool for the Semantic Web + + semantic content authoring + + microdata + semantic web + + lod + microdata + + + + + + + + Seed, an End-user Text Composition Tool for the Semantic Web + lod + Seed, an End-user Text Composition Tool for the Semantic Web + + semantic content authoring + + + semantic web + + Despite developments of Semantic Web-enabling technologies, +the gap between non-expert end-users and the Semantic Web still +exists. In the field of semantic content authoring, tools for interacting +with semantic content remain directed at highly trained individuals. This +adds to the challenges of bringing user-generated content into the Semantic +Web. +In this paper, we present Seed, short for Semantic Editor, an extensible +knowledge-supported natural language text composition tool, which +targets non-experienced end-users enabling automatic as well as semiautomatic +creation of standards based semantically annotated textual +content. 
We point out the structure of Seed, compare it with related +work and explain how it utilizes Linked Open Data and state of the art +Natural Language Processing to realize user-friendly generation of textual +content for the Semantic Web. We also present experimental evaluation +results involving a diverse group of more than 120 participants, +which showed that Seed helped end-users easily create and interact with +semantic content with nearly no prerequisite knowledge. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + 2016-10-20T15:50:00 + 2016-10-20T15:30:00 + + 2016-10-20T15:30:00 + 2016-10-20T15:50:00 + 2016-10-20T15:50:00 + Gerard de Melo + + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + + 2016-10-20T15:50:00 + + + + + + + + + + + + + + + + + + + + + + + + + + Semantics + On the Role of Semantics for Detecting pro-ISIS Stances on Social Media + + + + Semantics + + Twitter + + + Feature Engineering + + + + Radicalisation Detection + Twitter + On the Role of Semantics for Detecting pro-ISIS Stances on Social Media + + + + Radicalisation Detection + + + On the Role of Semantics for Detecting pro-ISIS Stances on Social Media + From its start, the so-called Islamic State of Iraq and the Levant (ISIL/ISIS) has been successfully exploiting social media networks, most notoriously Twitter, to promote its propaganda and recruit new members, resulting in thousands of social media users adopting pro-ISIS stance every year. Automatic identification of pro-ISIS users on social media has, thus, become the centre of interest for various governmental and research organisations. In this paper we propose a semantic-based approach for radicalisation detection on Twitter. 
Unlike most previous works, which mainly rely on the lexical and contextual representation of the content published by Twitter users, our approach extracts and makes use of the underlying semantics of words exhibited by these users to identify their pro/anti-ISIS stances. Our results show that classifiers trained from words' semantics outperform those trained from lexical and network features by 2% on average F1-measure. + + From its start, the so-called Islamic State of Iraq and the Levant (ISIL/ISIS) has been successfully exploiting social media networks, most notoriously Twitter, to promote its propaganda and recruit new members, resulting in thousands of social media users adopting pro-ISIS stance every year. Automatic identification of pro-ISIS users on social media has, thus, become the centre of interest for various governmental and research organisations. In this paper we propose a semantic-based approach for radicalisation detection on Twitter. Unlike most previous works, which mainly rely on the lexical and contextual representation of the content published by Twitter users, our approach extracts and makes use of the underlying semantics of words exhibited by these users to identify their pro/anti-ISIS stances. Our results show that classifiers trained from words' semantics outperform those trained from lexical and network features by 2% on average F1-measure. + + + Feature Engineering + + + + + + + The Azkar research project focuses on the remote control of a mobile robot using the emerging Web technologies WebRTC for real time communication. One of the use cases addressed is a remote visit of the French Museum of the Great War in Meaux. For this purpose, we designed an ontology for describing the main scenes in the museum, the objects that compose them, the different trails the robot can follow in a given time period, for a targeted audience, the way points, observation points. 
This RDF dataset is exploited to assist the human guide in designing a trail, and possibly adapting it during the visit. In this paper we present the Azkar Museum Ontology, the RDF dataset describing some emblematic scenes of the museum, and an experiment that took place in June 2016 with a robot controlled by an operator located 800~kms from the museum. We propose to demonstrate this work in real time during the conference by organizing a remote visit from the conference demo location. + + Robotics + + Semantic Web + Semantic Web + + Semantic Web Technologies for improving remote visits of museums, using a mobile robot + Museum Data + Robotics + The Azkar research project focuses on the remote control of a mobile robot using the emerging Web technologies WebRTC for real time communication. One of the use cases addressed is a remote visit of the French Museum of the Great War in Meaux. For this purpose, we designed an ontology for describing the main scenes in the museum, the objects that compose them, the different trails the robot can follow in a given time period, for a targeted audience, the way points, observation points. This RDF dataset is exploited to assist the human guide in designing a trail, and possibly adapting it during the visit. In this paper we present the Azkar Museum Ontology, the RDF dataset describing some emblematic scenes of the museum, and an experiment that took place in June 2016 with a robot controlled by an operator located 800~kms from the museum. We propose to demonstrate this work in real time during the conference by organizing a remote visit from the conference demo location. 
+ + Museum Data + + Semantic Web Technologies for improving remote visits of museums, using a mobile robot + + + Semantic Web Technologies for improving remote visits of museums, using a mobile robot + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Isa Guclu + + + + + Isa Guclu + + 4a6bb7ff3169ba73ad0c602322122cbd3e769f13 + + + Isa Guclu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + We present VLog, a new system for answering arbitrary Datalog queries on top of a wide range of databases, including both relational and RDF databases. VLog is designed to perform efficiently intensive rule-based computation on large Knowledge Graphs (KGs). It adapts column-store technologies to attain high efficiency in terms of memory usage and speed, enabling us to process Datalog queries with thousands of rules over databases with hundreds of millions of tuples---in a live demonstration on a laptop. Our demonstration provides in-depth insights into the workings of VLog, and presents important new features such as support for arbitrary relational DBMS. + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + Knowledge graphs + + + + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + + Database + + Datalog + Rules + Rules + We present VLog, a new system for answering arbitrary Datalog queries on top of a wide range of databases, including both relational and RDF databases. VLog is designed to perform efficiently intensive rule-based computation on large Knowledge Graphs (KGs). It adapts column-store technologies to attain high efficiency in terms of memory usage and speed, enabling us to process Datalog queries with thousands of rules over databases with hundreds of millions of tuples---in a live demonstration on a laptop. 
Our demonstration provides in-depth insights into the workings of VLog, and presents important new features such as support for arbitrary relational DBMS. + + + Knowledge graphs + + + Datalog + + + Reasoning + Database + + + + Reasoning + + + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + + + + + + + + + + + + + + + + + + Kouji Kozaki + + Kouji Kozaki + + + + + + + c493b1b07fa4dacfdda46edc55ebae341758972f + + Kouji Kozaki + + + + + + + + + + + + + + + + + + + + Zlatan Dragisic + + + Zlatan Dragisic + 3419a31dd2cda521ea35433b05de5cfdad69c558 + + Zlatan Dragisic + + + + + + + + 172da8328c6d7e6164b3b7e42bea3c565c88c41d + Wen Sun + Wen Sun + + + + + + + Wen Sun + + + + + + + + + + + + + + + Oslo University Hospital + + Oslo University Hospital + + + + Oslo University Hospital + + + + + + + + + + + + + + + + + + + + + + + + Fabian M. Suchanek + + Fabian M. Suchanek + + + + + + 8246b76bd2ce30dc6d8dbef5272f0597dbab58f2 + + + + Fabian M. Suchanek + + + + + + + + + + + Thomas Ploeger + + + + + + Thomas Ploeger + + + + Thomas Ploeger + + + + MARC + RDF conversion + Exploratory Search Engine + RDF interconnexion + Exploring Linked Classical Music Catalogs with OVERTURE + In this paper, we introduce OVERTURE - a web application allowing to explore the interlinked catalogs of major music libraries including the French National Library, Radio France and the Philharmonie de Paris. We have first developed the DOREMUS ontology which is an extension of the well-known FRBRoo model for describing works and expressions as well as the creation processus. We have implemented a so-called marc2rdf tool allowing for the conversion and linking of bibliographical entries about music works, interpretations and expressions from their original MARC-format to RDF following this DOREMUS ontology. 
We present an exploratory search engine prototype that enables to browse through the reconciled collection of bibliographical records of classical music and to highlight the various interpretations of a work, its derivative, its performance casting as well as other rich metadata. + FRBRoo + In this paper, we introduce OVERTURE - a web application allowing to explore the interlinked catalogs of major music libraries including the French National Library, Radio France and the Philharmonie de Paris. We have first developed the DOREMUS ontology which is an extension of the well-known FRBRoo model for describing works and expressions as well as the creation processus. We have implemented a so-called marc2rdf tool allowing for the conversion and linking of bibliographical entries about music works, interpretations and expressions from their original MARC-format to RDF following this DOREMUS ontology. We present an exploratory search engine prototype that enables to browse through the reconciled collection of bibliographical records of classical music and to highlight the various interpretations of a work, its derivative, its performance casting as well as other rich metadata. + + + RDF conversion + + + + OVERTURE + + + + FRBRoo + RDF interconnexion + + + Classical Music + Exploring Linked Classical Music Catalogs with OVERTURE + + + + Exploratory Search Engine + + Exploring Linked Classical Music Catalogs with OVERTURE + + + Classical Music + + + + OVERTURE + + + MARC + + + + + + + + Structure-guiding Modular Reasoning for Expressive Ontologies + classification + + We propose a technique that combine an OWL 2 EL reasoner with an OWL 2 reasoner to classify expressive ontologies. We exploit the information implied by the ontology structure to identify a small non-EL ontology that contains necessary axioms to ensure the completeness. 
In the process of ontology classification, the bulk of workload is delegated to an efficient OWL 2 EL reasoner and the small part of workload is handled by a less efficient OWL 2 reasoner. Experimental results show that our approach leads to a reasonable task assignment and offers a substantial speedup in ontology classification. + + + + + + Structure-guiding Modular Reasoning for Expressive Ontologies + We propose a technique that combine an OWL 2 EL reasoner with an OWL 2 reasoner to classify expressive ontologies. We exploit the information implied by the ontology structure to identify a small non-EL ontology that contains necessary axioms to ensure the completeness. In the process of ontology classification, the bulk of workload is delegated to an efficient OWL 2 EL reasoner and the small part of workload is handled by a less efficient OWL 2 reasoner. Experimental results show that our approach leads to a reasonable task assignment and offers a substantial speedup in ontology classification. + reasoner + + reasoner + ontology + classification + + + Structure-guiding Modular Reasoning for Expressive Ontologies + modular structure + modular structure + + + + ontology + + + + + + + + + + + + + + + + + + + + + + + Scott Markel + + Scott Markel + + Scott Markel + + + + + f0c4df58bfeb861f6b2c2e97ddd40c240abd001d + + + + + + + + + + + + + + + + + + + + + + + + Gong Cheng + Gong Cheng + 90949b51ff990bbe53fa3b030fb31b3c90634551 + + Gong Cheng + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Data publication + + + + Considerable investment in RDF publishing has recently led to the birth of the Web of Data. But is this investment worth it? Are publishers aware of how their linked datasets traffic looks like? +We propose an access analytics platform for linked datasets. The system mines traffic insights from the logs of registered RDF publishers and extracts Linked Data-specific metrics not available in traditional web analytics tools. 
+We present a demo instance showing one month (December 2014) of real traffic to the British National Bibliography RDF dataset. + + Linked Data + Access Logs Don't Lie: Towards Traffic Analytics for Linked Data Publishers + + Access analytics + Linked Data + + + Considerable investment in RDF publishing has recently led to the birth of the Web of Data. But is this investment worth it? Are publishers aware of how their linked datasets traffic looks like? +We propose an access analytics platform for linked datasets. The system mines traffic insights from the logs of registered RDF publishers and extracts Linked Data-specific metrics not available in traditional web analytics tools. +We present a demo instance showing one month (December 2014) of real traffic to the British National Bibliography RDF dataset. + + + + + + Access Logs Don't Lie: Towards Traffic Analytics for Linked Data Publishers + + + Data publication + + + + Access Logs Don't Lie: Towards Traffic Analytics for Linked Data Publishers + + + Access analytics + + + + Prolog + + + SWISH: An Integrated Semantic Web Notebook + Online editor + Semantic Web + SWISH: An Integrated Semantic Web Notebook + + + + + Semantic Web + + Query editor + + + SPARQL editors make it easier to write and inspect their results. Notebooks already support computer- and data scientists in domains like statistics and machine learning. There is currently not an integrated notebook solution for Semantic Web (SW) programming that combines the strengths of SPARQL editors with the benefits of notebooks. SWISH gives an integrated notebook experience for the Semantic Web programmer. + Notebook + + + SPARQL editors make it easier to write and inspect their results. Notebooks already support computer- and data scientists in domains like statistics and machine learning. There is currently not an integrated notebook solution for Semantic Web (SW) programming that combines the strengths of SPARQL editors with the benefits of notebooks. 
SWISH gives an integrated notebook experience for the Semantic Web programmer. + Query editor + SWISH: An Integrated Semantic Web Notebook + + Notebook + + + Prolog + Online editor + + + + + + + + + + + + + + + + + + + + + + + + + + Universidad de Chile + Universidad de Chile + + + + + + + + Universidad de Chile + + + + + + + + + + + + + + + + + + + + + 2016-10-20T10:30:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + Ontology Matching + Ontology Matching + 2016-10-20T11:50:00 + 2016-10-20T10:30:00 + + + + + + + + + + Linhong Zhu + + Linhong Zhu + + + + 3cc0050e756a60201573e99e5b1fb9d6209b2042 + + + Linhong Zhu + + + + + + + + Evidence-based Medicine + + + Knowledge Graph + Evidence-based medicine intends to optimize clinical decision making by using evidence. Semantic query answering could help to find the most relevant evidence. However, at point of care, it still lacks time for human reading of the evidence. In this poster, we propose to build an evidence graph for clinical decision support, in which an evidence ontology is defined with extension of SWRL rules. On top of this graph, we do evidence query and evidence fusion to generate the ranking list of decision options. Our prototype implementation of the evidence graph demonstrates its assistance to decision making, by combining a variety of knowledge-driven and data-driven decision services. + Evidence-based medicine intends to optimize clinical decision making by using evidence. Semantic query answering could help to find the most relevant evidence. However, at point of care, it still lacks time for human reading of the evidence. In this poster, we propose to build an evidence graph for clinical decision support, in which an evidence ontology is defined with extension of SWRL rules. On top of this graph, we do evidence query and evidence fusion to generate the ranking list of decision options. 
Our prototype implementation of the evidence graph demonstrates its assistance to decision making, by combining a variety of knowledge-driven and data-driven decision services. + Knowledge Graph + + + + Building Evidence Graph for Clinical Decision Support + Clinical Decision Support + + Building Evidence Graph for Clinical Decision Support + + + + Evidence-based Medicine + Building Evidence Graph for Clinical Decision Support + + Clinical Decision Support + + + + + + + + Joanna Biega + Joanna Biega + + Joanna Biega + + + + + + + + 40559ae5422c51d3c7e9d96279f7885cb1e39206 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + R2RML + We present the recent progress of SQuaRE, the SPARQL +Query and R2RML mappings Environment which provides a graphical +interface for creating R2RML mappings which can be immediately tested +by executing SPARQL queries. SQuaRE is a web-based tool with easy +to use interface that can be applied in the ontology-based data access +applications. We describe SQuaRE’s main features, its architecture as +well as technical details. + + + Mappings + + + + + + SQuaRE: A Visual Tool For Creating R2RML Mappings + + Visual web based interface + We present the recent progress of SQuaRE, the SPARQL +Query and R2RML mappings Environment which provides a graphical +interface for creating R2RML mappings which can be immediately tested +by executing SPARQL queries. SQuaRE is a web-based tool with easy +to use interface that can be applied in the ontology-based data access +applications. We describe SQuaRE’s main features, its architecture as +well as technical details. + + SQuaRE: A Visual Tool For Creating R2RML Mappings + + Visual web based interface + OBDA + OBDA + R2RML + + Mappings + + SQuaRE: A Visual Tool For Creating R2RML Mappings + + + We present Ontop-spatial, a geospatial extension of the well-known OBDA system Ontop, that leverages the technologies of geospatial databases and enables GeoSPARQL-to-SQL translation. 
We showcase the functionalities of the system in real-world use cases which require data integration of different geospatial sources. + + Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation + GeoSPARQL + + + + + + + Open geospatial data + + + Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation + We present Ontop-spatial, a geospatial extension of the well-known OBDA system Ontop, that leverages the technologies of geospatial databases and enables GeoSPARQL-to-SQL translation. We showcase the functionalities of the system in real-world use cases which require data integration of different geospatial sources. + Land management + Open geospatial data + + Land management + + Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation + Geospatial databases + + + + + + OBDA + + + OBDA + + + + Geospatial databases + GeoSPARQL + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Christoph Lange + + + + + + + Christoph Lange + + + + + Christoph Lange + + + + + + + + + + + + + + + + + Carlos Badenes + + + + + ef698b490b60e35e4fb9ee704d641f85aed03d9e + + Carlos Badenes + + + + + + Carlos Badenes + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + QA4LOV: A Natural Language Interface to Linked Open Vocabulary + + 2016-10-19T18:00:00 + + 2016-10-19T18:00:00 + + Ghislain Auguste Atemezing and Pierre-Yves Vandenbussche + 2016-10-19T21:00:00 + QA4LOV: A Natural Language Interface to Linked Open Vocabulary + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ranking + + + + + + + + + + Instance matching is the problem of finding the instances that describe the same object. It can be viewed as a classification problem, where a pair of two instances is predicted as match or non-match. 
A common limitation of existing classifier-based matching systems is the absent of instance pairs ranking. +We propose using a ranking feature to enhance the classifier in instance matching. +Experiments on real datasets confirm the significant improvement when applying our method. + instance matching + Ranking Feature for Classifier-based Instance Matching + + Ranking Feature for Classifier-based Instance Matching + ranking + Instance matching is the problem of finding the instances that describe the same object. It can be viewed as a classification problem, where a pair of two instances is predicted as match or non-match. A common limitation of existing classifier-based matching systems is the absent of instance pairs ranking. +We propose using a ranking feature to enhance the classifier in instance matching. +Experiments on real datasets confirm the significant improvement when applying our method. + classification + + + + Ranking Feature for Classifier-based Instance Matching + classification + + instance matching + + + + + + + + + + + + + + Corine Deliot + + Corine Deliot + + + + + Corine Deliot + + fa2689cea581553c242165050da94c9aee4b8fd0 + + + + + modeling tool + OWLAx: A Protege Plugin to Support Ontology Axiomatization through Diagramming + class diagram + + + + + OWLAx: A Protege Plugin to Support Ontology Axiomatization through Diagramming + Protege plugin + Protege plugin + OWLAx + axiomatization + + + modeling tool + OWLAx + Once the conceptual overview, in terms of a somewhat informal class diagram, has been designed in the course of engineering an ontology, the process of adding many of the appropriate logical axioms is mostly a routine task. We provide a Protege plugin which supports this task, together with a visual user interface, based on established methods for ontology design pattern modeling. 
+ + Protege + class diagram + + axiomatization + + + + + Once the conceptual overview, in terms of a somewhat informal class diagram, has been designed in the course of engineering an ontology, the process of adding many of the appropriate logical axioms is mostly a routine task. We provide a Protege plugin which supports this task, together with a visual user interface, based on established methods for ontology design pattern modeling. + + Protege + + OWLAx: A Protege Plugin to Support Ontology Axiomatization through Diagramming + + + + + + + + + + + + + + + Marieke van Erp + Marieke van Erp + + + + + + Marieke van Erp + + + + + + + + + + + + + + cecdbe0a45292f05f6f5876fb671c7f23303f6bd + + Gerhard Wohlgenannt + + + + Gerhard Wohlgenannt + + Gerhard Wohlgenannt + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Birte Glimm + Birte Glimm + + + + Birte Glimm + d9e3004543dab6b7586ec0c3846985b999320232 + + + + + + + + + + Human-Machine Collaboration over Linked Data + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Human-Machine Collaboration over Linked Data + + 2016-10-19T21:00:00 + Paolo Pareti + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 739d909c45670ed23a967be0bb5ad067dc8a408b + + + + + Petar Ristoski + Petar Ristoski + + + + Petar Ristoski + + + + + + RDF + + visualisation + + + An interactive visualisation for RDF data + We demonstrate a visualisation aimed at facilitating SPARQL-fluent +users to produce queries over a dataset they are not familiar with. +This visualisation consists of a labelled graph whose nodes are the different types of entities in the RDF dataset, +and where two types are related if entities of these types appear related in the RDF dataset. 
To avoid a visual overload when +the number of types in a dataset is too big, the graph groups together all types that are subclass of a +more general type, and users are given the option of navigating through this hierarchy of types, dividing type nodes +into subtypes as they see fit. +We illustrate our visualisation using the Linked Movie Database dataset, and offer as well the visualisation of DBpedia. + + + An interactive visualisation for RDF data + RDF + + An interactive visualisation for RDF data + + + endpoint + + endpoint + visualisation + + + + + + + + We demonstrate a visualisation aimed at facilitating SPARQL-fluent +users to produce queries over a dataset they are not familiar with. +This visualisation consists of a labelled graph whose nodes are the different types of entities in the RDF dataset, +and where two types are related if entities of these types appear related in the RDF dataset. To avoid a visual overload when +the number of types in a dataset is too big, the graph groups together all types that are subclass of a +more general type, and users are given the option of navigating through this hierarchy of types, dividing type nodes +into subtypes as they see fit. +We illustrate our visualisation using the Linked Movie Database dataset, and offer as well the visualisation of DBpedia. + + + + + + + + + + + + + + + + + + + + + + Multi-level semantic labelling of numerical values + + + + With the success of Open Data a huge amount of tabular data sources +became available that could potentially be mapped and linked into the Web of +(Linked) Data. Most existing approaches to “semantically label” such tabular +data rely on mappings of textual information to classes, properties, or instances +in RDF knowledge bases in order to link – and eventually transform – tabular +data into RDF. 
However, as we will illustrate, Open Data tables typically contain +a large portion of numerical columns and/or non-textual headers; therefore +solutions that solely focus on textual “cues” are only partially applicable for mapping +such data sources. We propose an approach to find and rank candidates of +semantic labels and context descriptions for a given bag of numerical values. To +this end, we apply a hierarchical clustering over information taken from DBpedia +to build a background knowledge graph of possible “semantic contexts” for +bags of numerical values, over which we perform a nearest neighbour search to +rank the most likely candidates. Our evaluation shows that our approach can assign +fine-grained semantic labels, when there is enough supporting evidence in +the background knowledge graph. In other cases, our approach can nevertheless +assign high level contexts to the data, which could potentially be used in combination +with other approaches to narrow down the search space of possible labels. + numerical data + With the success of Open Data a huge amount of tabular data sources +became available that could potentially be mapped and linked into the Web of +(Linked) Data. Most existing approaches to “semantically label” such tabular +data rely on mappings of textual information to classes, properties, or instances +in RDF knowledge bases in order to link – and eventually transform – tabular +data into RDF. However, as we will illustrate, Open Data tables typically contain +a large portion of numerical columns and/or non-textual headers; therefore +solutions that solely focus on textual “cues” are only partially applicable for mapping +such data sources. We propose an approach to find and rank candidates of +semantic labels and context descriptions for a given bag of numerical values. 
To +this end, we apply a hierarchical clustering over information taken from DBpedia +to build a background knowledge graph of possible “semantic contexts” for +bags of numerical values, over which we perform a nearest neighbour search to +rank the most likely candidates. Our evaluation shows that our approach can assign +fine-grained semantic labels, when there is enough supporting evidence in +the background knowledge graph. In other cases, our approach can nevertheless +assign high level contexts to the data, which could potentially be used in combination +with other approaches to narrow down the search space of possible labels. + semantic labelling + + numerical data + + + + + + + + + Multi-level semantic labelling of numerical values + + + + semantic labelling + Open Data + Open Data + + Multi-level semantic labelling of numerical values + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Politecnico di Milano + + + + Politecnico di Milano + + Politecnico di Milano + + + + + + + + + + data usage + QA4LOV: A Natural Language Interface to Linked Open Vocabulary + + user experience + + + + There is an increasing presence of structured data due to the adoption of Linked data principles on the web. At the same time, web users have different skills and want to be able to interact with Linked datasets in various manner, such as asking questions in natural language. Over the last years, the QALD challenges series are becoming the references for benchmarking question answering systems. However, QALD questions are targeted on datasets, not on vocabulary catalogues. 
This paper proposed a first implementation of Query Answering system (QA) applied to the Linked Open Vocabularies (LOV) catalogue, mainly focused on metadata information retrieval. The goal is to provide to end users yet another access to metadata information available in LOV using natural language questions. + + + Vocabulary Catalogue + + Vocabulary Catalogue + + + Question Answering + + + + + data usage + user experience + QA4LOV: A Natural Language Interface to Linked Open Vocabulary + Question Answering + + QA4LOV: A Natural Language Interface to Linked Open Vocabulary + There is an increasing presence of structured data due to the adoption of Linked data principles on the web. At the same time, web users have different skills and want to be able to interact with Linked datasets in various manner, such as asking questions in natural language. Over the last years, the QALD challenges series are becoming the references for benchmarking question answering systems. However, QALD questions are targeted on datasets, not on vocabulary catalogues. This paper proposed a first implementation of Query Answering system (QA) applied to the Linked Open Vocabularies (LOV) catalogue, mainly focused on metadata information retrieval. The goal is to provide to end users yet another access to metadata information available in LOV using natural language questions. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T10:30:00 + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T10:30:00 + Data Mining + 2016-10-21T11:50:00 + Data Mining + + + + + + + + + + + + + Filip De Turck + + + + a7caf54a202dd5d2d7ba5a707601e3a5b06541fc + + + + + + Filip De Turck + + + + Filip De Turck + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies + Chase + + + A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies + + + Reasoning + Acyclicity notions + + Reasoning + + + + Chase + + + Horn Description Logics + + + + + + A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies + Conjunctive query answering + + Conjunctive query answering + Horn Description Logics + Conjunctive query answering over expressive Horn Description Logic ontologies is a relevant and challenging problem which, in some cases, can be addressed by application of the chase algorithm. +In this paper, we define a novel acyclicity notion which provides sufficient condition for termination of the restricted chase over Horn-SRIQ ontologies. +We show that our notions generalize most of the existing acyclicity conditions (both theoretically and empirically) and its use results in a more efficient reasoning procedure. +Furthermore, we implement a materialization based reasoner for acyclic ontologies which vastly outperforms state-of-the-art reasoners. + + Conjunctive query answering over expressive Horn Description Logic ontologies is a relevant and challenging problem which, in some cases, can be addressed by application of the chase algorithm. +In this paper, we define a novel acyclicity notion which provides sufficient condition for termination of the restricted chase over Horn-SRIQ ontologies. 
+We show that our notions generalize most of the existing acyclicity conditions (both theoretically and empirically) and its use results in a more efficient reasoning procedure. +Furthermore, we implement a materialization based reasoner for acyclic ontologies which vastly outperforms state-of-the-art reasoners. + + Acyclicity notions + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Suvodeep Mazumdar + + + Suvodeep Mazumdar + + 9b8c784e7ba2e9fe6861bcd825c5075ff1c3ba5f + + + Suvodeep Mazumdar + + + + + + + + + Translating Ontologies in a Real-World Setting with ESSOT + + + + Collaborative Ontology Management Platform + + Context-based Concept Translation + + Ontology Translation + + To enable knowledge access across languages, ontologies that are often represented only in English, need to be translated into different languages. +The main challenge in translating ontologies is to find the right term with respect to the domain modeled by ontology itself. +Machine translation services may help in this task; however, a crucial requirement is to have translations validated by experts before the ontologies are deployed. +Real-world applications must implement a support system addressing this task for relieve experts work in validating all translations. +In this paper, we present ESSOT, an Expert Supporting System for Ontology Translation. +The peculiarity of this system is to exploit semantic information of the concept's context for improving the quality of label translations. +The system has been tested both within the Organic.Lingua project by translating the modeled ontology in three languages and on other multilingual ontologies in order to evaluate the effectiveness of the system in other contexts. +The results have been compared with the translations provided by the Microsoft Translator API and the improvements demonstrated the viability of the proposed approach. 
+ + + Collaborative Ontology Management Platform + Multilingual Ontology Management + Context-based Concept Translation + + + + + Translating Ontologies in a Real-World Setting with ESSOT + Ontology Translation + + + Translating Ontologies in a Real-World Setting with ESSOT + Multilingual Ontology Management + + To enable knowledge access across languages, ontologies that are often represented only in English, need to be translated into different languages. +The main challenge in translating ontologies is to find the right term with respect to the domain modeled by ontology itself. +Machine translation services may help in this task; however, a crucial requirement is to have translations validated by experts before the ontologies are deployed. +Real-world applications must implement a support system addressing this task for relieve experts work in validating all translations. +In this paper, we present ESSOT, an Expert Supporting System for Ontology Translation. +The peculiarity of this system is to exploit semantic information of the concept's context for improving the quality of label translations. +The system has been tested both within the Organic.Lingua project by translating the modeled ontology in three languages and on other multilingual ontologies in order to evaluate the effectiveness of the system in other contexts. +The results have been compared with the translations provided by the Microsoft Translator API and the improvements demonstrated the viability of the proposed approach. 
+ + + + + + + + + + + + + + + c7c523526586e7f6e0f7572f17789b43434485e1 + + + + + Christoforos Svingos + Christoforos Svingos + + + + + Christoforos Svingos + + + + + + + + + + + + + + + + + 2016-10-21T15:50:00 + 2016-10-21T15:50:00 + Multi-level semantic labelling of numerical values + + 2016-10-21T15:50:00 + Sebastian Neumaier, Jürgen Umbrich, Josiane Xavier Parreira and Axel Polleres + 2016-10-21T15:30:00 + + 2016-10-21T15:50:00 + + Multi-level semantic labelling of numerical values + 2016-10-21T15:30:00 + + + TNO - Netherlands Organization for Applied Scientific Research + + + + TNO - Netherlands Organization for Applied Scientific Research + + + + + TNO - Netherlands Organization for Applied Scientific Research + + + + + Alexander Panchenko + + + + Alexander Panchenko + + + + Alexander Panchenko + cf057f91ef698401ee2a6c1560dc5972f60b3a7c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Nathalie Hernandez + + + Nathalie Hernandez + + + + + b431523ad1b0cb8a4ce91720831c9927ae6405ba + + Nathalie Hernandez + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Conformity + Mappings + The entity type is considered as very important in DBpedia. Since this information is inconsistently described in different languages, it is difficult to recognize the most suitable type of an entity. We propose a method to predict the entity type based on a novel conformity measure. We combine the consideration of the specific-level of and the majority voting. The experiment result shows that our method can suggest informative types and outperforms the baselines. 
+ Conformity + + Type Prediction for Entities in DBpedia by Aggregating Multilingual Resources + + + Consistency + Onology + Type Prediction for Entities in DBpedia by Aggregating Multilingual Resources + + Onology + DBpedia + Type Prediction for Entities in DBpedia by Aggregating Multilingual Resources + + + + + + + + + + + + The entity type is considered as very important in DBpedia. Since this information is inconsistently described in different languages, it is difficult to recognize the most suitable type of an entity. We propose a method to predict the entity type based on a novel conformity measure. We combine the consideration of the specific-level of and the majority voting. The experiment result shows that our method can suggest informative types and outperforms the baselines. + + + + Mappings + + + + DBpedia + Consistency + + + + + 2016-10-20T11:10:00 + 2016-10-20T11:10:00 + 2016-10-20T11:10:00 + + 2016-10-20T10:50:00 + SPARQLGX: Efficient Distributed Evaluation of SPARQL with Apache Spark + 2016-10-20T11:10:00 + SPARQLGX: Efficient Distributed Evaluation of SPARQL with Apache Spark + Damien Graux, Louis Jachiet, Pierre Geneves and Nabil Layaida + 2016-10-20T10:50:00 + + + 46888155168581432fee8e6ada04ab13add97543 + Liang Zhao + Liang Zhao + + + + + Liang Zhao + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Thierry Bergeron + + + + + + Thierry Bergeron + + + Thierry Bergeron + afde5c5a9ef2d57456432187fda4e8961c60e7dd + + + + + + + 2016-10-19T21:00:00 + John P. 
Mccrae and Philipp Cimiano + LIXR: Quick, succinct conversion of XML to RDF + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + LIXR: Quick, succinct conversion of XML to RDF + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + Democritus University of Thrace + Democritus University of Thrace + Democritus University of Thrace + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Konstantina Bereta + + + + 6dc2e6c44ba12728237ec901f3f5190970a8dbf3 + + Konstantina Bereta + + + + + Konstantina Bereta + + + + + + + + + + + + + + + + + + + + CubeQA—Question Answering on RDF Data Cubes + + Data Cube + + CubeQA—Question Answering on RDF Data Cubes + Data Cube + Statistical data in the form of RDF Data Cubes is becoming increasingly valuable as it influences decisions in areas such as health care, policy and finance. While a growing amount is becoming freely available through the open data movement, this data is opaque to laypersons. Semantic Question Answering (SQA) technologies provide access via free-form natural language queries but general SQA systems cannot process RDF Data Cubes. On the intersection between RDF Data Cubes and SQA, we create a new subfield of SQA, called RDCQA. We create an RDQCA benchmark as task 3 of the QALD-6 evaluation challenge, to stimulate further research and enable quantitative comparison between RDCQA systems. We design and evaluate the CubeQA algorithm, which +is the first RDCQA system and achieves a global F 1 score of 0.43 on the QALD6T3-test dataset, showing that RDCQA is feasible. + + + + Question Answering + + + + Question Answering + + + Statistical data in the form of RDF Data Cubes is becoming increasingly valuable as it influences decisions in areas such as health care, policy and finance. While a growing amount is becoming freely available through the open data movement, this data is opaque to laypersons. 
Semantic Question Answering (SQA) technologies provide access via free-form natural language queries but general SQA systems cannot process RDF Data Cubes. On the intersection between RDF Data Cubes and SQA, we create a new subfield of SQA, called RDCQA. We create an RDQCA benchmark as task 3 of the QALD-6 evaluation challenge, to stimulate further research and enable quantitative comparison between RDCQA systems. We design and evaluate the CubeQA algorithm, which +is the first RDCQA system and achieves a global F 1 score of 0.43 on the QALD6T3-test dataset, showing that RDCQA is feasible. + Semantic Web + + + Semantic Web + + CubeQA—Question Answering on RDF Data Cubes + + + + + + + + + + + + + + + + + + + + + Ganesh Ramakrishnan + Ganesh Ramakrishnan + + + + + + + Ganesh Ramakrishnan + b36f5c490b830f5fd6580cebb8a587ead9acb559 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Seoul National University + + + Seoul National University + Seoul National University + + + + + + + + + + + + + + + + + + + Using word2vec to Build a Simple Ontology Learning System + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + Using word2vec to Build a Simple Ontology Learning System + 2016-10-19T21:00:00 + Gerhard Wohlgenannt and Filip Minic + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Reducing the Network Load of Triple Pattern Fragments by Supporting Bind Joins + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Olaf Hartig and Carlos Buil Aranda + 2016-10-19T21:00:00 + Reducing the Network Load of Triple Pattern Fragments by Supporting Bind Joins + + + + + + + + + + + + + + + + + + + + + + + + + + + EXISTStential Aspects of 
SPARQL + EXISTStential Aspects of SPARQL + SPARQL + + The SPARQL 1.1 Query Language cite{SPARQL} permits patterns inside {sf FILTER} +expressions using the {sf EXISTS} construct, specified by using substitution. +Substitution destroys some of the aspects of SPARQL that make +it suitable as a data access language. As well, substitution causes +problems in the SPARQL algebra and produces counterintuitive results. +Fixing the problems with {sf EXISTS} is best done with a completely different +definition that does not use substitution at all. + + + + + + + + SPARQL + + The SPARQL 1.1 Query Language cite{SPARQL} permits patterns inside {sf FILTER} +expressions using the {sf EXISTS} construct, specified by using substitution. +Substitution destroys some of the aspects of SPARQL that make +it suitable as a data access language. As well, substitution causes +problems in the SPARQL algebra and produces counterintuitive results. +Fixing the problems with {sf EXISTS} is best done with a completely different +definition that does not use substitution at all. 
+ + EXISTStential Aspects of SPARQL + Semantic Web + RDF + + + + RDF + + Semantic Web + + + + René Speck + + René Speck + + + + + René Speck + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Marco Rospocher + Marco Rospocher + Marco Rospocher + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Towards Building Open Knowledge Base From Programming Question-Answering Communities + + Wei Emma Zhang, Ermyas Abebe, Quan Z. Sheng and Kerry Taylor + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + Towards Building Open Knowledge Base From Programming Question-Answering Communities + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Nick Portokallidis + Nick Portokallidis + + + + + + + + Nick Portokallidis + + + + + 2016-10-19T21:00:00 + Adam Sotona + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + 2016-10-19T18:00:00 + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + 2016-10-19T14:40:00 + 2016-10-19T14:40:00 + + Building Urban LOD for Solving Illegally Parked Bicycles in Tokyo + 2016-10-19T14:20:00 + 2016-10-19T14:40:00 + + Shusaku Egami, Takahiro Kawamura and Akihiko Ohsuga + 2016-10-19T14:20:00 + 2016-10-19T14:40:00 + + Building Urban LOD for Solving Illegally Parked 
Bicycles in Tokyo + + + + + + + + + + + + + + + + Craig Knoblock + + + + + Craig Knoblock + + Craig Knoblock + + + + + 2c2715555efac793759255fe12d117541cf52a37 + + + + + + + + + + + + + + + + + + + + + + + Marco Balduini + cfab487eaef36c19086e0179c2fe26284f2d913a + + + + + + Marco Balduini + + Marco Balduini + + + + + + + + + + + + + + + + + + + + + Marcelo Arenas + 263fbdd4ad0d4f74a9a210582a03d51d7e04b904 + + + + + + + + + Marcelo Arenas + Marcelo Arenas + + + + + + + Heshan Du + + + + + + + Heshan Du + + 4dc27b50f093d4b0ab10a3ccd64e73706971b8ca + + + + Heshan Du + + + + + + + + + + + + + Shen Gao, Daniele Dell'Aglio, Soheila Dehghanzadeh, Abraham Bernstein, Emanuele Della Valle and Alessandra Mileo + + 2016-10-20T13:50:00 + 2016-10-20T13:50:00 + 2016-10-20T13:50:00 + 2016-10-20T13:50:00 + 2016-10-20T13:30:00 + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + + 2016-10-20T13:30:00 + + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + + + Stasinos Konstantopoulos + + + + Stasinos Konstantopoulos + + Stasinos Konstantopoulos + + c27c2c6e0668abc64be17d28decbae5ce6b9de93 + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + Kai Lenz, Hiroshi Masuya and Norio Kobayashi + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + Learning to Assess Linked Data Relationships Using Genetic Programming + Learning to Assess Linked Data Relationships Using Genetic Programming + + 2016-10-20T14:10:00 + 2016-10-20T14:10:00 + Ilaria Tiddi, Mathieu d'Aquin and Enrico Motta + 2016-10-20T13:50:00 + 
2016-10-20T14:10:00 + 2016-10-20T13:50:00 + + + 2016-10-20T14:10:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A Survey on Challenges in Web Markup Data for Entity Retrieval + + + Entity Retrieval + + + Web Markup + + Embedded markup based on Microdata, RDFa, and Microformats have become prevalent on the Web and constitute an unprecedented data source. RDF statements from markup are highly redundant, co-references are very frequent yet explicit links are missing, and with numerous errors in such statements. + +We present a thorough analysis on the challenges associated with markup data in the context of entity retrieval. We analyze four main factors: (i) co-references, (ii) redundancy, (iii) inconsistencies, and (iv) accessibility of information in the case of URLs. We conclude with general guidelines on how to avoid such challenges when dealing with embedded markup data. + + + Web Markup + + + + + + + A Survey on Challenges in Web Markup Data for Entity Retrieval + Embedded markup based on Microdata, RDFa, and Microformats have become prevalent on the Web and constitute an unprecedented data source. RDF statements from markup are highly redundant, co-references are very frequent yet explicit links are missing, and with numerous errors in such statements. + +We present a thorough analysis on the challenges associated with markup data in the context of entity retrieval. We analyze four main factors: (i) co-references, (ii) redundancy, (iii) inconsistencies, and (iv) accessibility of information in the case of URLs. We conclude with general guidelines on how to avoid such challenges when dealing with embedded markup data. 
+ Knowledge Base Augmentation + Knowledge Base Augmentation + + Web Data Commons + A Survey on Challenges in Web Markup Data for Entity Retrieval + + Web Data Commons + + + + Entity Retrieval + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + + Stasinos Konstantopoulos, Angelos Charalambidis, Giannis Mouchakis, Antonis Troumpoukis, Jürgen Jakobitsch and Vangelis Karkaletsis + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + 9c33f970b7c8ccb56bc9edc435e83941a3b379f8 + + + + + Chris Biemann + + + + + Chris Biemann + + + Chris Biemann + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T11:40:00 + 2016-10-19T12:00:00 + DeFacto - Temporal and multilingual Deep Fact Validation + 2016-10-19T12:00:00 + DeFacto - Temporal and multilingual Deep Fact Validation + 2016-10-19T11:40:00 + 2016-10-19T12:00:00 + + Daniel Gerber, Diego Esteves, Jens Lehmann, Lorenz Bühmann, Ricardo Usbeck, Axel-Cyrille Ngonga Ngomo and René Speck + + 2016-10-19T12:00:00 + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Enriching Answers in Question Answering Systems using Linked Data + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + Rivindu Perera, Parma Nand and Gisela Klette + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + Enriching Answers in Question Answering Systems using Linked Data + + + + + + Newcastle University + + Newcastle University + + + + Newcastle University + + + + + + University of Jyväskylä + + + + + University of Jyväskylä + + + + + University of Jyväskylä + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + Paolo Pareti + + + + Paolo Pareti + + 781ab0cdaf783bea7cb49f37df9d31aa3b32e922 + + + Paolo Pareti + + + SPARQL + Nowadays, governments and public agencies publish open data at an exponentially growing rate on dedicated portals. These open data have a problem: they don’t have a well defined structure, because the focus is on publishing data and not on how they are used. GovLOD is a platform that aims to transform the information found in these heterogeneous files in Linked Open Data using RDF triples. + + RDF + + SPARQL + GovLOD: Towards a Linked Open Data Portal + Nowadays, governments and public agencies publish open data at an exponentially growing rate on dedicated portals. These open data have a problem: they don’t have a well defined structure, because the focus is on publishing data and not on how they are used. GovLOD is a platform that aims to transform the information found in these heterogeneous files in Linked Open Data using RDF triples. + + + GovLOD: Towards a Linked Open Data Portal + + Linked Open Data + OCR + + + GovLOD: Towards a Linked Open Data Portal + + + + OCR + + + + RDF + + Linked Open Data + + + + Theofilos Mailis + + + + + + + + Theofilos Mailis + + Theofilos Mailis + + + 0cccd26a3c6ad39b1a942b4ae2bbe42f119df5ed + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Xinqi Qian + + + Xinqi Qian + + + + + + Xinqi Qian + + + + 63bbd783ee647baf05e503e74387e94aeffb4f57 + + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + + Joo Sungmin, Seiji Koide, Hideaki Takeda, Daisuke Horyu, Akane Takezaki and Tomokazu Yoshida + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
d88d0e2e1df2be16c9319b862d2a2e00a4a89a9f + + Jasper Roes + Jasper Roes + + + + Jasper Roes + + + + + + + + + + + + + + + + + + + + + + + Kaiser Permanente / ITHSDO + + Kaiser Permanente / ITHSDO + + + Kaiser Permanente / ITHSDO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Rivindu Perera, Parma Nand and Gisela Klette + + + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Freddie Venegas + + + + + Freddie Venegas + + + 690718ca20d735c96fd000d31b024c11d677bd10 + Freddie Venegas + + + + + + + + + + + + + + + + + + + + + knowledge base + + + knowledge base + + + Wikipedia + + YAGO is a large knowledge base that is built automatically from Wikipedia, WordNet and GeoNames. The project combines information from 10 Wikipedias of different languages, thus giving the knowledge a multilingual dimension. It also attaches spatial and temporal information to many facts, and thus allows the user to query the data over space and time. YAGO focuses on extraction quality and achieves a manually evaluated precision of 95%. In this paper, we explain from a general perspective how YAGO is built from its sources, how its quality is evaluated, how a user can access it, and how other projects utilize it. + YAGO is a large knowledge base that is built automatically from Wikipedia, WordNet and GeoNames. The project combines information from 10 Wikipedias of different languages, thus giving the knowledge a multilingual dimension. 
It also attaches spatial and temporal information to many facts, and thus allows the user to query the data over space and time. YAGO focuses on extraction quality and achieves a manually evaluated precision of 95%. In this paper, we explain from a general perspective how YAGO is built from its sources, how its quality is evaluated, how a user can access it, and how other projects utilize it. + YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames + + WordNet + + + + WordNet + + YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames + Geonames + + YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames + Geonames + + + + + + + + + + + Wikipedia + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Dimitris Kontokostas + + + Dimitris Kontokostas + + + + 3e57edc1ccd259391e188db92889f8aafb2f9162 + + Dimitris Kontokostas + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Antonis Troumpoukis + + + Antonis Troumpoukis + + + Antonis Troumpoukis + + + + 5d383f8a98a158ff54afb898af352c5799b0b5b3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + WU Vienna + + + + + + WU Vienna + WU Vienna + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Universitat de Lleida + + Universitat de Lleida + Universitat de Lleida + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Jelle Nelis + + + + + + + + 89b8e847d443c5e54dad39f679ccb76db7edeccc + Jelle Nelis + Jelle Nelis + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The OWL Reasoner Evaluation (ORE) 2015 Resources + The OWL Reasoner Evaluation (ORE) 2015 Resources + 
2016-10-21T16:10:00 + Bijan Parsia, Nicolas Matentzoglu, Rafael S. Gonçalves, Birte Glimm and Andreas Steigmiller + + 2016-10-21T16:10:00 + + 2016-10-21T16:30:00 + 2016-10-21T16:30:00 + 2016-10-21T16:30:00 + + 2016-10-21T16:30:00 + + + + + + + + + + + + + + + + + + + + + + + + + 06ccd0bcd02f785b9c5f1a0f85a08a77fbe3f86d + + + + + Martina Hodrius + Martina Hodrius + + + Martina Hodrius + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Julian Dolby + + + + + + Julian Dolby + + + Julian Dolby + ab2bddc57e5d65d6f1e31ed59c14e0063bc0fb65 + + + + + + + + + + + + + + + + + + + fa510b49182ab0643e6a0c3bfd30588ae83a77fb + Conrad Bielski + + Conrad Bielski + + Conrad Bielski + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + emergent schema + emergent schema + + Query optimization + We build on our earlier finding that more than 95% of the triples in actual RDF triple graphs have a remarkably tabular structure, whose schema does not necessarily follow from explicit metadata such as ontologies, but which an RDF store can automatically derive by looking at the data using so-called ``emergent schema'' detection techniques. In this paper we investigate how computers and in particular RDF stores can take advantage from this emergent schema to more compactly store RDF data and more efficiently optimize and execute SPARQL queries. To this end, we contribute techniques for efficient emergent schema aware RDF storage and new query operator algorithms for emergent schema aware scans and joins. In all, these techniques allow RDF schema processors fully catch up with relational database techniques in terms of rich physical database design options and efficiency, without requiring a rigid upfront schema structure definition. 
+ RDF + + + SPARQL + + Exploiting Emergent Schemas to make RDF systems more efficient + SPARQL + + + Query optimization + + + + + + Exploiting Emergent Schemas to make RDF systems more efficient + Exploiting Emergent Schemas to make RDF systems more efficient + We build on our earlier finding that more than 95% of the triples in actual RDF triple graphs have a remarkably tabular structure, whose schema does not necessarily follow from explicit metadata such as ontologies, but which an RDF store can automatically derive by looking at the data using so-called ``emergent schema'' detection techniques. In this paper we investigate how computers and in particular RDF stores can take advantage from this emergent schema to more compactly store RDF data and more efficiently optimize and execute SPARQL queries. To this end, we contribute techniques for efficient emergent schema aware RDF storage and new query operator algorithms for emergent schema aware scans and joins. In all, these techniques allow RDF schema processors fully catch up with relational database techniques in terms of rich physical database design options and efficiency, without requiring a rigid upfront schema structure definition. 
+ RDF + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T09:00:00 + 2016-10-19T09:30:00 + 2016-10-19T09:30:00 + 2016-10-19T09:30:00 + 2016-10-19T09:00:00 + 2016-10-19T09:30:00 + Opening Ceremony + Opening Ceremony + + + + + + + + + + + + + + + + + + + + + + + Scott Kimberly + + + + + + 4b4e9dce5c1c6a46bb6071cbab4f7f6e7b14ebf3 + + + Scott Kimberly + + + Scott Kimberly + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Domenico Fabio Savo + + + + + f74605d82f83f7f00a084b4540127266437a0dbf + + + + Domenico Fabio Savo + + + Domenico Fabio Savo + + + + + + + + + Akihiko Ohsuga + + + 9aa155d3f61640d018c23a3964285756f30be617 + Akihiko Ohsuga + + Akihiko Ohsuga + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Xavier Oriol + + + + Xavier Oriol + + + + + + + + 9c74a8e7a43688d61046ed2ec2cc3134bb9d40c2 + + Xavier Oriol + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Optimization Techniques for 2-hop Labeling of Dynamic Directed Acyclic Graphs + 2016-10-18T14:45:00 + 2016-10-18T14:45:00 + 2016-10-18T14:30:00 + Optimization Techniques for 2-hop Labeling of Dynamic Directed Acyclic Graphs + + 2016-10-18T14:45:00 + + 2016-10-18T14:30:00 + 2016-10-18T14:45:00 + Jinhyun Ahn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Hasso-Plattner-Institute + + + + Hasso-Plattner-Institute + + Hasso-Plattner-Institute + + + + + + + + + + + + + + + + + + + + + LAAS-CNRS + LAAS-CNRS + + + + + + + LAAS-CNRS + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + INRIA / Univ. Grenoble + INRIA / Univ. Grenoble + + + + + INRIA / Univ. 
Grenoble + + + + + + + + + + + + + + Extending SPARQL for data analytic tasks + 2016-10-20T10:50:00 + Extending SPARQL for data analytic tasks + 2016-10-20T10:30:00 + 2016-10-20T10:50:00 + Kavitha Srinivas, Julian Dolby, Achille Fokoue, Mariano Rodríguez Muro and Wen Sun + 2016-10-20T10:30:00 + 2016-10-20T10:50:00 + + 2016-10-20T10:50:00 + + + + Airbus Defence and Space + + + + + Airbus Defence and Space + + + + Airbus Defence and Space + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-17T09:00:00 + 2016-10-21T18:00:00 + 2016-10-21T18:00:00 + 2016-10-21T18:00:00 + 2016-10-17T09:00:00 + ISWC 2016 + 2016-10-21T18:00:00 + + + 2016-10-20T16:30:00 + 2016-10-20T16:10:00 + 2016-10-20T16:30:00 + + + 2016-10-20T16:30:00 + 2016-10-20T16:30:00 + 2016-10-20T16:10:00 + Semantic Sensitive Simultaneous Tensor Factorization + Semantic Sensitive Simultaneous Tensor Factorization + Makoto Nakatsuji + + + + + + + + + + + + + + + + + + + + + + + + + + Eugene Siow + + + + + 82546c7d01fb2cfa7aebabf9051508329b4ed818 + + + Eugene Siow + + + Eugene Siow + + + + + + + + + + + + + + + + + + Monika Solanki + + + + + + + + Monika Solanki + Monika Solanki + 9a9a3db0c71354edefbcd034fb6c43c2645cfdcc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Query Interpretation + The amount of entities in large knowledge bases available on the Web has been increasing rapidly, making it possible to propose new ways of intelligent information access. In addition, there is an impending need for technologies that can enable cross-lingual information access. As a simple and intuitive way of specifying information needs, keyword queries enjoy widespread usage, but suffer from the challenges including ambiguity, incompleteness and cross-linguality. 
In this paper, we present a knowledge base approach to cross-lingual keyword query interpretation by transforming keyword queries in different languages to their semantic representation, which can facilitate query disambiguation and expansion, and also bridge the language barriers of queries. The experimental results show that our approach achieves both high efficiency and effectiveness and considerably outperforms the baselines. + A Knowledge Base Approach to Cross-lingual Keyword Query Interpretation + Cross-lingual + + Query Interpretation + + + + + The amount of entities in large knowledge bases available on the Web has been increasing rapidly, making it possible to propose new ways of intelligent information access. In addition, there is an impending need for technologies that can enable cross-lingual information access. As a simple and intuitive way of specifying information needs, keyword queries enjoy widespread usage, but suffer from the challenges including ambiguity, incompleteness and cross-linguality. In this paper, we present a knowledge base approach to cross-lingual keyword query interpretation by transforming keyword queries in different languages to their semantic representation, which can facilitate query disambiguation and expansion, and also bridge the language barriers of queries. The experimental results show that our approach achieves both high efficiency and effectiveness and considerably outperforms the baselines. 
+ + + Semantic Search + + + + A Knowledge Base Approach to Cross-lingual Keyword Query Interpretation + + + + + Cross-lingual + A Knowledge Base Approach to Cross-lingual Keyword Query Interpretation + Semantic Search + + + + + + + + + + + + + + + Jacopo Urbani + adee22b352dd8781301e45e715855e4d92dc034b + Jacopo Urbani + + + + + Jacopo Urbani + + + + + + Semantic Web Company + Semantic Web Company + + + + + + Semantic Web Company + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Wouter Maroy + + + + + 1d8e3247b84f8cc2c9c6a2d21b45da4cc7d3ffc0 + Wouter Maroy + + + Wouter Maroy + + + + + + + + + + + + + + + + + + + + + + + + + + + Stephan Grimm + + + + Stephan Grimm + e74280013eee6aa91402c3d32e196248801c4c52 + + Stephan Grimm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kazunari Hashimoto + + Kazunari Hashimoto + + + + + 6d66e2d997ed04b84e2c08f1ca218495b2ca16d5 + + Kazunari Hashimoto + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Muhammad Saleem + + + + Muhammad Saleem + Muhammad Saleem + + + + + + + + + Yosky Kataoka + + + + + + Yosky Kataoka + Yosky Kataoka + b25d3c4e6549cabd57a39ce40b0cd55d5234920c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Petar Ristoski, Gerben Klaas Dirk de Vries and Heiko Paulheim + 2016-10-20T16:10:00 + 2016-10-20T15:50:00 + 2016-10-20T15:50:00 + + A Collection of Benchmark Datasets for Systematic Evaluations of Machine Learning on the Semantic Web + A Collection of Benchmark Datasets for Systematic Evaluations of Machine Learning on the Semantic Web + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + + + + Climate Change + We introduce EnergyUse, a collaborative website designed for raising climate change 
awareness by offering users the ability to view and compare the actual energy consumption of various appliances, and to share and discuss energy conservation tips in an open and social environment. The platform collects data from smart plugs, and exports appliance consumption and community generated energy tips as linked data. EnergyUse is supported by multiples automatic processes that semantically link related contributions, generate appliances descriptions and publish consumption data using the EnergyUse ontology. + Energy Monitors + Semantic Collective Platforms + + + Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse + + + + + Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse + + Semantic Collective Platforms + + + + + Energy Monitors + Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse + + + + Climate Change + Energy Consumption + + We introduce EnergyUse, a collaborative website designed for raising climate change awareness by offering users the ability to view and compare the actual energy consumption of various appliances, and to share and discuss energy conservation tips in an open and social environment. The platform collects data from smart plugs, and exports appliance consumption and community generated energy tips as linked data. EnergyUse is supported by multiples automatic processes that semantically link related contributions, generate appliances descriptions and publish consumption data using the EnergyUse ontology. 
+ Energy Consumption + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Studying Metadata for better client-server trade-offs in Linked Data publishing + + Studying Metadata for better client-server trade-offs in Linked Data publishing + 2016-10-18T14:15:00 + Miel Vander Sande + 2016-10-18T14:15:00 + 2016-10-18T14:00:00 + 2016-10-18T14:15:00 + 2016-10-18T14:15:00 + + 2016-10-18T14:00:00 + + + + + 4fb9b67420f8f10a024c13f62f3119f2d77f3319 + Joo Sungmin + + Joo Sungmin + + + + + + + + Joo Sungmin + + + + + + + + + + + + + + + + + + a27d42b6a587d5e289e2b7b901dc097b9c1a777e + + + + + + Renzo Angles + + Renzo Angles + Renzo Angles + + + + + + + + + + + + + + + Genetic Programming + The goal of this work is to learn a measure supporting the detection of strong relationships between Linked Data entities. Such relationships can be represented as paths of entities and properties, and can be obtained through a blind graph search process traversing Linked Data. The challenge here is therefore the design of a cost-function that is able to detect the strongest relationship between two given entities, by objectively assessing the value of a given path. To achieve this, we use a Genetic Programming approach in a supervised learning method to generate path evaluation functions that compare well with human evaluations. We show how such a cost-function can be generated only using basic topological features of the nodes of the paths as they are being traversed (i.e. without knowledge of the whole graph), and how it can be improved through introducing a very small amount of knowledge about the vocabularies of the properties that connect nodes in the graph. 
+ + + + Genetic Programming + + + Learning to Assess Linked Data Relationships Using Genetic Programming + + Learning to Assess Linked Data Relationships Using Genetic Programming + + + Linked Data + + + Entity Relatedness + + + Entity Relatedness + + The goal of this work is to learn a measure supporting the detection of strong relationships between Linked Data entities. Such relationships can be represented as paths of entities and properties, and can be obtained through a blind graph search process traversing Linked Data. The challenge here is therefore the design of a cost-function that is able to detect the strongest relationship between two given entities, by objectively assessing the value of a given path. To achieve this, we use a Genetic Programming approach in a supervised learning method to generate path evaluation functions that compare well with human evaluations. We show how such a cost-function can be generated only using basic topological features of the nodes of the paths as they are being traversed (i.e. without knowledge of the whole graph), and how it can be improved through introducing a very small amount of knowledge about the vocabularies of the properties that connect nodes in the graph. 
+ + + Learning to Assess Linked Data Relationships Using Genetic Programming + + Linked Data + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T17:00:00 + 2016-10-18T17:30:00 + 2016-10-18T17:30:00 + 2016-10-18T17:30:00 + Poster 3 + 2016-10-18T17:00:00 + 2016-10-18T17:30:00 + Poster 3 + + + + + + + + + + + + + + Ruben Verborgh + + Ruben Verborgh + + + fb22bc1100f1f5b282380024f58bf4e906fd3e69 + + + + Ruben Verborgh + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Feature Generation using Ontologies during Induction of Decision Trees on Linked Data + 2016-10-18T14:15:00 + 2016-10-18T14:30:00 + 2016-10-18T14:30:00 + Feature Generation using Ontologies during Induction of Decision Trees on Linked Data + + 2016-10-18T14:30:00 + 2016-10-18T14:15:00 + Yordan Terziev + + 2016-10-18T14:30:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Eiki Takayama + + 1848905605caa9952b93550a4b5ba43de98da8d2 + + + + Eiki Takayama + + + Eiki Takayama + + + + Lijuan Xue + + + Lijuan Xue + + + Lijuan Xue + + dec36ae14a493558407a5f9c57d3dadea131f528 + + + + + + + + + Vienna University of Economics and Business + Vienna University of Economics and Business + + Vienna University of Economics and Business + + + + + + + + + + + + + + + + + + + + + Yordan Terziev + + Yordan Terziev + 6d4a545ed7db4840477a964fcec27d83c3deb354 + + + + + + Yordan Terziev + + + + + + + 0eddeb7726394f5c00d415d6e0e93075fd6be528 + + + + + + Thomas Wilmering + Thomas Wilmering + + + + Thomas Wilmering + + + + + + + + In this demo we present an extension of SPARQL which allows queries to connect to JSON APIs and integrate the obtained information into query answers. We achieve this by adding a new operator to SPARQL, and implement this extension on top of the Jena framework in order to illustrate how it functions with real world APIs. 
+ + In this demo we present an extension of SPARQL which allows queries to connect to JSON APIs and integrate the obtained information into query answers. We achieve this by adding a new operator to SPARQL, and implement this extension on top of the Jena framework in order to illustrate how it functions with real world APIs. + + + + + + JSON + + + + API + API + Incorporating API data into SPARQL query answers + + + + SPARQL + + + SPARQL + + + Incorporating API data into SPARQL query answers + JSON + + Incorporating API data into SPARQL query answers + + + + + + + + + + + + + Universite Libre de Bruxelles + + + Université Libre de Bruxelles + Universite Libre de Bruxelles + + Université Libre de Bruxelles + Université Libre de Bruxelles + + + Universite Libre de Bruxelles + + + + + + + 2016-10-19T11:20:00 + + 2016-10-19T11:00:00 + 2016-10-19T11:20:00 + Bahaa Eldesouky, Menna Bakry, Heiko Maus and Andreas Dengel + Seed, an End-user Text Composition Tool for the Semantic Web + 2016-10-19T11:00:00 + + 2016-10-19T11:20:00 + Seed, an End-user Text Composition Tool for the Semantic Web + + 2016-10-19T11:20:00 + + + + + + + + + + + + + + + fluid Operations AG + fluid Operations AG + fluid Operations AG + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + information extraction + information extraction + Full-fledged enterprise information can be a great weapon in investment analysis. However, enterprise information is scattered in different databases and websites. The information from a single source is incomplete and also suffers from noise. It is not an easy task to integrate and utilize information from diverse sources in real business scenarios. In this paper, we present an approach to build knowledge graphs (KGs) by exploiting semantic technologies to reconcile the data from diverse sources incrementally. 
We build a national-wide enterprise KG which incorporates information about 40,000,000 enterprises in China. We also provide querying about enterprises and data visualization capabilities as well as novel investment analysis scenarios, including finding an enterprise's real controllers, innovative enterprise analysis, enterprise path discovery and so on. The KG and its applications are currently used by two security companies in their investment banking businesses. + + + Building and Exploring National-wide Enterprise Knowledge Graphs for Investment Analysis in an Incremental Way + + + data fusion + + D2R + + investment analysis + D2R + knowledge graphs + + + investment analysis + + Building and Exploring National-wide Enterprise Knowledge Graphs for Investment Analysis in an Incremental Way + + + Building and Exploring National-wide Enterprise Knowledge Graphs for Investment Analysis in an Incremental Way + + + + + + data fusion + Full-fledged enterprise information can be a great weapon in investment analysis. However, enterprise information is scattered in different databases and websites. The information from a single source is incomplete and also suffers from noise. It is not an easy task to integrate and utilize information from diverse sources in real business scenarios. In this paper, we present an approach to build knowledge graphs (KGs) by exploiting semantic technologies to reconcile the data from diverse sources incrementally. We build a national-wide enterprise KG which incorporates information about 40,000,000 enterprises in China. We also provide querying about enterprises and data visualization capabilities as well as novel investment analysis scenarios, including finding an enterprise's real controllers, innovative enterprise analysis, enterprise path discovery and so on. The KG and its applications are currently used by two security companies in their investment banking businesses. 
+ + knowledge graphs + + + + + + + + + + + + + + + + + + + + + + + + + + + + fbd2145490116fd5dc78374fdbc51bf12ed646bb + + + + Le Tuan Anh + + Le Tuan Anh + + + + Le Tuan Anh + + + + + + + + + SPORTAL: Searching for Public SPARQL Endpoints + + web querying + + + web querying + + + + SPORTAL: Searching for Public SPARQL Endpoints + + linked data + sparql + + + + SPORTAL: Searching for Public SPARQL Endpoints + There are hundreds of SPARQL endpoints on the Web, but finding an endpoint relevant to a client's needs is difficult: each endpoint acts like a black box, often without a description of its content. Herein we briefly describe SPORTAL: a system that collects meta-data about the content of endpoints and collects them into a central catalogue over which clients can search. SPORTAL sends queries to individual endpoints offline to learn about their content, generating a best-effort VoID description for each endpoint. These descriptions can then be searched and queried over by clients in the SPORTAL user interface, for example, to find endpoints that contain instances of a given class, or triples with a given predicate, or more complex requests such as endpoints with at least 1,000 images of people. Herein we give a brief overview of SPORTAL, its design and functionality, and the features that shall be demoed at the conference. + + There are hundreds of SPARQL endpoints on the Web, but finding an endpoint relevant to a client's needs is difficult: each endpoint acts like a black box, often without a description of its content. Herein we briefly describe SPORTAL: a system that collects meta-data about the content of endpoints and collects them into a central catalogue over which clients can search. SPORTAL sends queries to individual endpoints offline to learn about their content, generating a best-effort VoID description for each endpoint. 
These descriptions can then be searched and queried over by clients in the SPORTAL user interface, for example, to find endpoints that contain instances of a given class, or triples with a given predicate, or more complex requests such as endpoints with at least 1,000 images of people. Herein we give a brief overview of SPORTAL, its design and functionality, and the features that shall be demoed at the conference. + sparql + + + linked data + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Jérôme Euzenat + + + + + Jérôme Euzenat + 8347bc1d33e9e27f680aaaffa43de37a27f12991 + + + Jérôme Euzenat + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Traversal-based Query Execution + Traversal-based Query Execution + + + Walking without a Map: Ranking-Based Traversal for Querying Linked Data + + The emergence of Linked Data on the WWW has spawned research interest in an online execution of declarative queries over this data. A particularly interesting approach is traversal-based query execution which fetches data by traversing data links and, thus, is able to make use of up-to-date data from initially unknown data sources. While the downside of this approach is the delay before the query engine completes a query execution, user perceived response time may be improved significantly by returning as many elements of the result set as soon as possible. To this end, the query engine requires a traversal strategy that enables the engine to fetch result-relevant data as early as possible. The challenge for such a strategy is that the query engine does not know a priori what data sources will be discovered during the query execution and which of them contain result-relevant data. 
In this paper, we investigate 14 different approaches to rank traversal steps and achieve a variety of traversal strategies. We experimentally study their impact on response times and compare them to a baseline that resembles a breadth-first traversal. While our experiments show that some of the approaches can achieve noteworthy improvements over the baseline in a significant number of cases, we also observe that for every approach, there is a non-negligible chance to achieve response times that are worse than the baseline. + Query Processing + + + + + + The emergence of Linked Data on the WWW has spawned research interest in an online execution of declarative queries over this data. A particularly interesting approach is traversal-based query execution which fetches data by traversing data links and, thus, is able to make use of up-to-date data from initially unknown data sources. While the downside of this approach is the delay before the query engine completes a query execution, user perceived response time may be improved significantly by returning as many elements of the result set as soon as possible. To this end, the query engine requires a traversal strategy that enables the engine to fetch result-relevant data as early as possible. The challenge for such a strategy is that the query engine does not know a priori what data sources will be discovered during the query execution and which of them contain result-relevant data. In this paper, we investigate 14 different approaches to rank traversal steps and achieve a variety of traversal strategies. We experimentally study their impact on response times and compare them to a baseline that resembles a breadth-first traversal. While our experiments show that some of the approaches can achieve noteworthy improvements over the baseline in a significant number of cases, we also observe that for every approach, there is a non-negligible chance to achieve response times that are worse than the baseline. 
+ SPARQL + + + Linked Data Queries + SPARQL + Query Processing + + + Walking without a Map: Ranking-Based Traversal for Querying Linked Data + Linked Data Queries + + + Walking without a Map: Ranking-Based Traversal for Querying Linked Data + + + + + knowledge graphs + + faceted search + + + + OWL + + knowledge graphs + SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs + In this demo we present the SemFacet system for faceted search over ontology enhanced Knowledge Graphs (KGs) stored in RDF. SemFacet allows users to query KGs with relatively complex SPARQL queries via an intuitive Amazon-like interface. SemFacet can compute faceted interfaces over large scale RDF datasets by relying on incremental algorithms and over large ontologies by exploiting ontology projection techniques. SemFacet relies on an in-memory triple store and current implementation bundles JRDFox, Sesame, Stardog, and PAGOdA. During the demonstration the attendees can try SemFacet by exploring Yago KG. + + SPARQL + + RDF + + faceted search + + + OWL + + + + In this demo we present the SemFacet system for faceted search over ontology enhanced Knowledge Graphs (KGs) stored in RDF. SemFacet allows users to query KGs with relatively complex SPARQL queries via an intuitive Amazon-like interface. SemFacet can compute faceted interfaces over large scale RDF datasets by relying on incremental algorithms and over large ontologies by exploiting ontology projection techniques. SemFacet relies on an in-memory triple store and current implementation bundles JRDFox, Sesame, Stardog, and PAGOdA. During the demonstration the attendees can try SemFacet by exploring Yago KG. 
+ + + + RDF + + + SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs + + SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs + SPARQL + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + smart appliances + + standardization + Household appliances are set to become highly intelligent, smart and networked devices in the near future. Systematically deployed on the Internet of Things (IoT), they would be able to form complete energy consuming, producing, and managing ecosystems. Smart systems are technically very heterogeneous, and standardized interfaces on a sensor and device level are therefore needed. However, standardization in IoT has largely focused at the technical communication level, leading to a large number of different solutions based on various standards and protocols, with limited attention to the common semantics contained in the message data structures exchanged at the technical level. The Smart Appliance REFerence ontology (SAREF) is a shared model of consensus developed in close interaction with the industry and with the support of the European Commission. It is published as a technical specification by ETSI and provides an important contribution to achieve semantic interoperability for smart appliances. This paper builds on the success achieved in standardizing SAREF and presents SAREF4EE, an extension of SAREF. SAREF4EE has been created in collaboration with the EEBus and Energy@Home industry associations to interconnect their (different) data models. By using SAREF4EE, smart appliances from different manufacturers that support the EEBus or Energy@Home standards can easily communicate with each other using any energy management system at home or in the cloud. 
+ Internet of Things + + + + semantic interoperability + semantic interoperability + + + + + + + + + smart appliances + + + Internet of Things + + + Ontology + standardization + + Interoperability for Smart Appliances in the IoT World + + Ontology + Interoperability for Smart Appliances in the IoT World + Household appliances are set to become highly intelligent, smart and networked devices in the near future. Systematically deployed on the Internet of Things (IoT), they would be able to form complete energy consuming, producing, and managing ecosystems. Smart systems are technically very heterogeneous, and standardized interfaces on a sensor and device level are therefore needed. However, standardization in IoT has largely focused at the technical communication level, leading to a large number of different solutions based on various standards and protocols, with limited attention to the common semantics contained in the message data structures exchanged at the technical level. The Smart Appliance REFerence ontology (SAREF) is a shared model of consensus developed in close interaction with the industry and with the support of the European Commission. It is published as a technical specification by ETSI and provides an important contribution to achieve semantic interoperability for smart appliances. This paper builds on the success achieved in standardizing SAREF and presents SAREF4EE, an extension of SAREF. SAREF4EE has been created in collaboration with the EEBus and Energy@Home industry associations to interconnect their (different) data models. By using SAREF4EE, smart appliances from different manufacturers that support the EEBus or Energy@Home standards can easily communicate with each other using any energy management system at home or in the cloud. 
+ + Interoperability for Smart Appliances in the IoT World + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Semantic annotation + + ontology + + + + + A Reuse-based Annotation Approach for Medical Documents + + + A Reuse-based Annotation Approach for Medical Documents + + + medical documents + Annotations are useful to semantically enrich documents and other datasets with concepts of standardized vocabularies and ontologies. In the medical domain, many documents are not annotated at all and manual annotation is a difficult and time-consuming process. Therefore, automatic annotation methods become necessary to support human annotators with recommendations. We propose a reuse-based annotation approach that clusters items in medical documents according to verified ontology-based annotations. We identify a set of representative features for annotation clusters and propose a context-based selection strategy that considers the semantic relatedness and frequent co-occurrences of annotated concepts. We evaluate our methods and the annotation tool MetaMap based on reference mappings between medical forms and the Unified Medical Language System. + + + + Annotations are useful to semantically enrich documents and other datasets with concepts of standardized vocabularies and ontologies. In the medical domain, many documents are not annotated at all and manual annotation is a difficult and time-consuming process. Therefore, automatic annotation methods become necessary to support human annotators with recommendations. We propose a reuse-based annotation approach that clusters items in medical documents according to verified ontology-based annotations. We identify a set of representative features for annotation clusters and propose a context-based selection strategy that considers the semantic relatedness and frequent co-occurrences of annotated concepts. 
We evaluate our methods and the annotation tool MetaMap based on reference mappings between medical forms and the Unified Medical Language System. + A Reuse-based Annotation Approach for Medical Documents + medical documents + UMLS + UMLS + + ontology + Semantic annotation + + + + + + + + + + + + eb165935f24f2953766c8aea84e423e7e6161ff4 + + + Peter Mechant + Peter Mechant + Peter Mechant + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + USC / Information Sciences Institute + USC / Information Sciences Institute + + + + + + USC / Information Sciences Institute + + + + + + + + + 2016-10-19T15:20:00 + 2016-10-19T15:00:00 + Laura M. Daniele, Monika Solanki, Frank Den Hartog and Jasper Roes + + Interoperability for Smart Appliances in the IoT World + 2016-10-19T15:20:00 + + Interoperability for Smart Appliances in the IoT World + + 2016-10-19T15:20:00 + 2016-10-19T15:00:00 + 2016-10-19T15:20:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Mannheim + + + University of Mannheim + + University of Mannheim + + + + + + + + + Marco Brambilla + + + + + + Marco Brambilla + + 2bffe0887e5bf93e4fe6db921c7f6798a9e65201 + Marco Brambilla + + + + + + + + + + + + + + + + + + + + + + eb6f0b160b878341c5b8feb6b52ab68305ec6433 + + + Giancarlo Guizzardi + Giancarlo Guizzardi + Giancarlo Guizzardi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Francois Goasdoue + + + Francois Goasdoue + 226862c37f70b29f9a1b6304a1dad68473ff91c9 + + Francois Goasdoue + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Keywords + + + + + + + + + OBDA + + + + + + + + In ontology-based data access (OBDA) the users access relational databases (RDBs) via ontologies that mediate between the users and the data. Ontologies are connected to data via declarative ontology-to-RDB mappings that relate each ontological term to an SQL query. 
In this demo we present our system KeywDB that facilitates construction of ontology-to-RDB mappings in an interactive fashion. In KeywDB users provide examples of entities for classes that require mappings and the system returnes a ranked list of such mappings. In doing so KeywDB relies on techniques for keyword query answering over RDBs. During the demo the attendees will try KeywDB with NorthWind and NPD FP databases and collections of mappings that we prepare. + + + + + KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction + + + + + OBDA + + KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction + + Mapping construction + KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction + + + In ontology-based data access (OBDA) the users access relational databases (RDBs) via ontologies that mediate between the users and the data. Ontologies are connected to data via declarative ontology-to-RDB mappings that relate each ontological term to an SQL query. In this demo we present our system KeywDB that facilitates construction of ontology-to-RDB mappings in an interactive fashion. In KeywDB users provide examples of entities for classes that require mappings and the system returnes a ranked list of such mappings. In doing so KeywDB relies on techniques for keyword query answering over RDBs. During the demo the attendees will try KeywDB with NorthWind and NPD FP databases and collections of mappings that we prepare. 
+ Keywords + Mapping construction + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T11:30:00 + 2016-10-21T11:50:00 + Building event-centric knowledge graphs from news + + Marco Rospocher, Marieke van Erp, Piek Vossen, Antske Fokkens, Itziar Aldabe, German Rigau, Aitor Soroa, Thomas Ploeger, Tessel Bogaard + Building event-centric knowledge graphs from news + 2016-10-21T11:30:00 + + + 2016-10-21T11:50:00 + + + Haiphong University + + + Haiphong University + + + + + + + + Haiphong University + + + + 2016-10-18T15:15:00 + 2016-10-18T15:45:00 + 2016-10-18T15:45:00 + 2016-10-18T15:45:00 + Poster 2 + 2016-10-18T15:45:00 + Poster 2 + 2016-10-18T15:15:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + GeoSPARQL + + + + + geospatial OBDA + GeoSPARQL + Ontop of Geospatial Databases + + + + + In this paper we propose an OBDA approach for accessing geospatial data stored in geospatial relational databases, using the OGC standard GeoSPARQL and R2RML or OBDA mappings. We introduce extensions to existing SPARQL-to-SQL approaches to support GeoSPARQL features. We describe the implementation of our approach in the system ontop-spatial, an extension of the OBDA system Ontop for creating virtual geospatial RDF graphs on top of geospatial relational databases. Last, we present an experimental evaluation of our system using workload and queries from a recent benchmark. In order to measure the performance of our system, we compare it to the state-of-the-art geospatial RDF store, and confirm its efficiency. + + + In this paper we propose an OBDA approach for accessing geospatial data stored in geospatial relational databases, using the OGC standard GeoSPARQL and R2RML or OBDA mappings. We introduce extensions to existing SPARQL-to-SQL approaches to support GeoSPARQL features. 
We describe the implementation of our approach in the system ontop-spatial, an extension of the OBDA system Ontop for creating virtual geospatial RDF graphs on top of geospatial relational databases. Last, we present an experimental evaluation of our system using workload and queries from a recent benchmark. In order to measure the performance of our system, we compare it to the state-of-the-art geospatial RDF store, and confirm its efficiency. + + + Ontop of Geospatial Databases + geospatial databases + + geospatial OBDA + geospatial databases + + Ontop of Geospatial Databases + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 4806a6b7bed87a607cf39dc4db433512253f594f + + + Allan Third + + Allan Third + + + + + Allan Third + + + + + + 2016-10-18T12:00:00 + 2016-10-18T12:30:00 + 2016-10-18T12:30:00 + 2016-10-18T12:30:00 + 2016-10-18T12:00:00 + Poster 1 + Poster 1 + 2016-10-18T12:30:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ralf Möller + Ralf Möller + + eab5f5d1e7d7c9a899ff51817294075e192ce2fc + + Ralf Möller + + + + + + Eleni Kaldoudi + Eleni Kaldoudi + + + + Eleni Kaldoudi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 24544b392104f565a6dee56a057f2ad429b32949 + Louis Jachiet + + + + + + + Louis Jachiet + + Louis Jachiet + + + + + + National Agriculture and Food Research Organization + + + + + + + + National Agriculture and Food Research Organization + + + National Agriculture and Food Research Organization + + + + + + + + + Hitachi, Ltd. + + + Hitachi, Ltd. + Hitachi, Ltd. + + + + + + + + + + Due to the growing need to timely process and derive valuable information and knowledge from data produced in the Semantic Web, RDF stream processing (RSP) has emerged as an important research domain. Of course, modern RSP have to address the volume and velocity characteristics encountered in the Big Data era. 
This comes at the price of designing high throughput, low latency, fault tolerant, highly available and scalable engines. The cost of implementing such systems from scratch is very high and usually one prefers to program components on top of a framework that possesses these properties, e.g., Apache Hadoop or Apache Spark. The research conducting in this PhD adopts this approach and aims to create a production-ready RSP engine which will be based on domain standards, e.g., Apache Kafka and Spark Streaming. In a nutshell, the engine aims to i) address basic event modeling - to guarantee the completeness of input data in window operators, ii) process real-time RDF stream in a distributed manner - efficient RDF stream handling is required; iii) support and extend common continuous SPARQL syntax - easy-to-use, adapt to the industrial needs and iv) support reasoning services at both the data preparation and query processing levels. + + Distributed Computing + + + + + + Semantic Web + + Semantic Web + + RDF + Towards a distributed, scalable and real-time RDF Stream Processing engine + + Due to the growing need to timely process and derive valuable information and knowledge from data produced in the Semantic Web, RDF stream processing (RSP) has emerged as an important research domain. Of course, modern RSP have to address the volume and velocity characteristics encountered in the Big Data era. This comes at the price of designing high throughput, low latency, fault tolerant, highly available and scalable engines. The cost of implementing such systems from scratch is very high and usually one prefers to program components on top of a framework that possesses these properties, e.g., Apache Hadoop or Apache Spark. The research conducting in this PhD adopts this approach and aims to create a production-ready RSP engine which will be based on domain standards, e.g., Apache Kafka and Spark Streaming. 
In a nutshell, the engine aims to i) address basic event modeling - to guarantee the completeness of input data in window operators, ii) process real-time RDF stream in a distributed manner - efficient RDF stream handling is required; iii) support and extend common continuous SPARQL syntax - easy-to-use, adapt to the industrial needs and iv) support reasoning services at both the data preparation and query processing levels. + Towards a distributed, scalable and real-time RDF Stream Processing engine + Stream Processing + + + RSP + RDF + Towards a distributed, scalable and real-time RDF Stream Processing engine + Stream Processing + + RSP + + Distributed Computing + + + + + + data integration + + class-class relationships + Data Acquisition by Traversing Class-Class Relationships over the Linked Open Data + + + data integration + linked data + Linked Open Data (LOD) is a powerful mechanism for linking different datasets published on the Web, which is expected to create new value of data through mash-up over various datasets on the Web. One of the important needs to obtain data from LOD is to find a path of resources connecting given two classes, each of which has an end resource of the path. +In this study, the two technologies for the approach are introduced: a labeled multi graph named class graph to compute class-class relationships and an RDF specification named SPARQL Builder Metadata to obtain and store required metadata for construction of a class graph. In addition, as a practical application, we introduce the SPARQL Builder system, which assists users in writing semantic queries for LOD. + Linked Open Data (LOD) is a powerful mechanism for linking different datasets published on the Web, which is expected to create new value of data through mash-up over various datasets on the Web. One of the important needs to obtain data from LOD is to find a path of resources connecting given two classes, each of which has an end resource of the path. 
+In this study, the two technologies for the approach are introduced: a labeled multi graph named class graph to compute class-class relationships and an RDF specification named SPARQL Builder Metadata to obtain and store required metadata for construction of a class graph. In addition, as a practical application, we introduce the SPARQL Builder system, which assists users in writing semantic queries for LOD. + + + + + Data Acquisition by Traversing Class-Class Relationships over the Linked Open Data + Data Acquisition by Traversing Class-Class Relationships over the Linked Open Data + + + + class-class relationships + + + + + + + linked data + + + + + + + + + + + + + + Ordnance Survey Ireland + + Ordnance Survey Ireland + Ordnance Survey Ireland + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Guus Schreiber + + Guus Schreiber + Guus Schreiber + + + + + + + + + + + + 6af8641a60a97dbcc8df40418611e5ada4884405 + + + + + + Sebastian Brandt + Sebastian Brandt + + + + + + Sebastian Brandt + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vanessa Lopez, Pierpaolo Tommasi, Spyros Kotoulas and Jiewen Wu + 2016-10-21T14:30:00 + + 2016-10-21T14:30:00 + + QuerioDALI: Question Answering over Dynamic and Linked Knowledge Graphs + 2016-10-21T14:30:00 + QuerioDALI: Question Answering over Dynamic and Linked Knowledge Graphs + 2016-10-21T14:10:00 + 2016-10-21T14:30:00 + + 2016-10-21T14:10:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + disambiguation + + + + Qur'an + + Qur'an + + In this paper we illustrate how we harness the power of crowds and specialized experts through automated knowledge acquisition workflows for semantic annotation in specialized and knowledge intensive domains. 
We undertake the special case of the Arabic script of the Qur'an, a widely studied manuscript, and apply a hybrid methodology of traditional 'crowdsourcing' augmented with 'expertsourcing' for semantically annotating its verses. We demonstrate that our proposed hybrid method presents a promising approach for achieving reliable annotations in an efficient and scalable manner, especially in cases where a high level of accuracy is required in knowledge intense and sensitive domains. + + + Harnessing Crowds and Experts for Semantic Annotation of the Qur'an + + Harnessing Crowds and Experts for Semantic Annotation of the Qur'an + In this paper we illustrate how we harness the power of crowds and specialized experts through automated knowledge acquisition workflows for semantic annotation in specialized and knowledge intensive domains. We undertake the special case of the Arabic script of the Qur'an, a widely studied manuscript, and apply a hybrid methodology of traditional 'crowdsourcing' augmented with 'expertsourcing' for semantically annotating its verses. We demonstrate that our proposed hybrid method presents a promising approach for achieving reliable annotations in an efficient and scalable manner, especially in cases where a high level of accuracy is required in knowledge intense and sensitive domains. 
+ disambiguation + + + + + semantic annotation + + + + semantic annotation + + + Harnessing Crowds and Experts for Semantic Annotation of the Qur'an + + + + + + + + + + + + + + + + + + + + c31290b982c8a8bf62516fc1b00799c03cab7b70 + + + + + Jonas Bulegon Gassen + + + + Jonas Bulegon Gassen + Jonas Bulegon Gassen + + + + + + + + + + + + + + Steffen Lohmann + + Steffen Lohmann + + + 2c3ceffc433179f5b0b2a067fccf89d08d4cad28 + + Steffen Lohmann + + + + + + + + + + + + + + + + + + + + + + + + + + + Mondeca + Mondeca + + + + Mondeca + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 8e4a9cdb6f8940a89d0ade82e430471527ac62fe + Kazuhisa Seta + + Kazuhisa Seta + + + Kazuhisa Seta + + + + + + + 2016-10-20T18:30:00 + 2016-10-20T21:30:00 + 2016-10-20T21:30:00 + 2016-10-20T21:30:00 + 2016-10-20T18:30:00 + 2016-10-20T21:30:00 + Dinner + Dinner + + + 2016-10-19T18:00:00 + + Makoto Urakawa, Masaru Miyazaki, Hiroshi Fujisawa, Masahide Naemura and Ichiro Yamada + Constructing Curriculum Ontology and Dynamic Learning Path Based on Resource Description Framework + 2016-10-19T21:00:00 + + Constructing Curriculum Ontology and Dynamic Learning Path Based on Resource Description Framework + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + + + + + + + + + + Open Microscopy Environment + Microscopy image + + + + Microscopy image + Open Microscopy Environment + Development of an Ontology for an Integrated Image Analysis Platform to enable Global Sharing of Microscopy Imaging Data + Imaging data are fundamental to life sciences. We aimed to construct a microscopy ontology for an integrated metadata database of optical and electron microscopy images combined with various bio-entities. To realise this, we applied the Resource Description Framework (RDF) to an Open Microscopy Environment (OME) data model, which is the de facto standard to describe optical microscopy images and experimental data. 
We translated the XML-based OME metadata into the base concept of Web Ontology Language (OWL) as a trial of developing microscopy ontology. We describe the OWL-based ontology of microscopy imaging data and propose 18 upper-level concepts of ontology with missing concepts such as electron microscopy, phenotype data, biosample, and imaging conditions. + + Metadata + Development of an Ontology for an Integrated Image Analysis Platform to enable Global Sharing of Microscopy Imaging Data + + + + + + + + + + Imaging data are fundamental to life sciences. We aimed to construct a microscopy ontology for an integrated metadata database of optical and electron microscopy images combined with various bio-entities. To realise this, we applied the Resource Description Framework (RDF) to an Open Microscopy Environment (OME) data model, which is the de facto standard to describe optical microscopy images and experimental data. We translated the XML-based OME metadata into the base concept of Web Ontology Language (OWL) as a trial of developing microscopy ontology. We describe the OWL-based ontology of microscopy imaging data and propose 18 upper-level concepts of ontology with missing concepts such as electron microscopy, phenotype data, biosample, and imaging conditions. 
+ + + RDF/OWL + + Metadata + + Development of an Ontology for an Integrated Image Analysis Platform to enable Global Sharing of Microscopy Imaging Data + + RDF/OWL + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Benjamin Cogrel + Benjamin Cogrel + + + + + + + Benjamin Cogrel + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T14:50:00 + A Probabilistic Model for Time-Aware Entity Recommendation + + 2016-10-20T14:30:00 + 2016-10-20T14:50:00 + + A Probabilistic Model for Time-Aware Entity Recommendation + + Lei Zhang and Achim Rettinger + 2016-10-20T14:30:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + Christophe Debruyne, Eamonn Clinton, Lorraine McNerney, Atul Nautiyal and Declan O'Sullivan + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Serving Ireland's Geospatial Information as Linked Data + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Serving Ireland's Geospatial Information as Linked Data + + + + + + + + + + + + + + + + + + + + + + + + + + Semantic MediaWiki + + + Information extraction + + + In the development departments of some manufacturing companies, +there are weekly reports describing the status of events but they are poorly structured plain texts. +In this report, we propose a method for constructing semantic networks of development activities from weekly reports. +Our ontology-based method extracts things like events, status and agents from the reports and constructs relations +between them and creates Semantic MediaWiki pages from the semantic networks to visualize development activities. +We show a use case to apply the method to actual weekly reports and internal documents of a development department. 
+ + Constructing Semantic Networks of Development Activities from Weekly Reports + + + Development activity + + + Constructing Semantic Networks of Development Activities from Weekly Reports + + + + Constructing Semantic Networks of Development Activities from Weekly Reports + Semantic MediaWiki + Development activity + + Information extraction + In the development departments of some manufacturing companies, +there are weekly reports describing the status of events but they are poorly structured plain texts. +In this report, we propose a method for constructing semantic networks of development activities from weekly reports. +Our ontology-based method extracts things like events, status and agents from the reports and constructs relations +between them and creates Semantic MediaWiki pages from the semantic networks to visualize development activities. +We show a use case to apply the method to actual weekly reports and internal documents of a development department. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Xin Wang + + edd615f4b8d0a61bdd8770ec087c4b2ff442125f + + Xin Wang + Xin Wang + + + + + + + Queen Mary University of London + + + + Queen Mary University of London + + + Queen Mary University of London + + + + + + + + + + + + Shanghai Polytechnic University, China + Shanghai Polytechnic University, China + + + Shanghai Polytechnic University, China + + + + + + + Triple Extraction + Data Mining + + Extracting Semantic Information for e-Commerce + + Extracting Semantic Information for e-Commerce + + Ontology Learning + + e-commerce + + + + Rakuten + Rakuten Ichiba uses a taxonomy to organize the items it sells. 
Currently, the taxonomy classes that are relevant in terms of profit generation and difficulty of exploration are being manually extended with data properties deemed helpful to create pages that improve the user search experience and ultimately the conversion rate. In this paper we present a scalable approach that aims to automate this process, automatically selecting the relevant and semantically homogenous subtrees in the taxonomy, extracting from semi-structured text in items descriptions a core set of properties and a popular subset of their ranges, then ex- tending the covered range using relational similarities in free text. Additionally, our process automatically tags the items with the new semantic information and exposes them as RDF triples. We present a set of experiments showing the effectiveness of our approach in this business context. + + Triple Extraction + Rakuten + + + + + Extracting Semantic Information for e-Commerce + Rakuten Ichiba uses a taxonomy to organize the items it sells. Currently, the taxonomy classes that are relevant in terms of profit generation and difficulty of exploration are being manually extended with data properties deemed helpful to create pages that improve the user search experience and ultimately the conversion rate. In this paper we present a scalable approach that aims to automate this process, automatically selecting the relevant and semantically homogenous subtrees in the taxonomy, extracting from semi-structured text in items descriptions a core set of properties and a popular subset of their ranges, then ex- tending the covered range using relational similarities in free text. Additionally, our process automatically tags the items with the new semantic information and exposes them as RDF triples. We present a set of experiments showing the effectiveness of our approach in this business context. 
+ Machine Learning + Data Mining + Ontology Learning + Machine Learning + + + + e-commerce + + + + + + + + + + + + + + + + + + + Adam Sotona + + + Adam Sotona + + + + + 571a1fb7a88168822ba35590b5ab4e9c7f47336c + Adam Sotona + + + + + + + + + + + + 292f9640243b1369299b50ffada02682bd7e70b4 + + + Zhijia Fang + + + + Zhijia Fang + Zhijia Fang + + + + + Representing RDF Stream Processing Queries in RSP-SPIN + + + RSP-QL + A number of RDF Stream Processing (RSP) systems have been developed to support the processing of streaming Linked Data, however, due to the lack of a standardized query language they all provide different SPARQL extensions. The RSP Community Group is in the process of developing a standardized RSP query language (RSP-QL), which incorporates many of features of existing RSP language extensions. In this demo paper we describe how RSP-SPIN, a SPIN extension for representing RSP-QL queries, can be used to encapsulate RSP queries as RDF, forming a syntax agnostic representation that can be used to support serialization into multiple RSP language extensions. This could be useful, for example, to reduce the effort required to produce and maintain RSP benchmarks, since developers can focus on a single representation per query, rather than manually implementing and validating queries for several languages in parallel. + + + RDF Stream Processing + RSP-SPIN + Representing RDF Stream Processing Queries in RSP-SPIN + RDF Stream Processing + + + + + + + RSP-QL + + A number of RDF Stream Processing (RSP) systems have been developed to support the processing of streaming Linked Data, however, due to the lack of a standardized query language they all provide different SPARQL extensions. The RSP Community Group is in the process of developing a standardized RSP query language (RSP-QL), which incorporates many of features of existing RSP language extensions. 
In this demo paper we describe how RSP-SPIN, a SPIN extension for representing RSP-QL queries, can be used to encapsulate RSP queries as RDF, forming a syntax agnostic representation that can be used to support serialization into multiple RSP language extensions. This could be useful, for example, to reduce the effort required to produce and maintain RSP benchmarks, since developers can focus on a single representation per query, rather than manually implementing and validating queries for several languages in parallel. + RSP-SPIN + Representing RDF Stream Processing Queries in RSP-SPIN + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Protege plugin + + Rule + Modeling tool + + In our experience, some ontology modelers find it much easier to express logical axioms using rules rather than using OWL (or description logic) syntax. Based on recent theoretical developments on transformations between rules and description logics, we develop ROWL, a Protege plugin that allows users to enter OWL axioms by way of rules; the plugin then automatically converts these rules into OWL DL axioms if possible, and prompts the user in case such a conversion is not possible without weakening the semantics of the rule. + + Modeling OWL with Rules: The ROWL Protege Plugin + ROWL + + Rule-to-OWL transformation + + Protege + + + + + + Rule + Modeling tool + + ROWL + + + + + + + Modeling OWL with Rules: The ROWL Protege Plugin + Rule-to-OWL transformation + Protege plugin + Protege + In our experience, some ontology modelers find it much easier to express logical axioms using rules rather than using OWL (or description logic) syntax. 
Based on recent theoretical developments on transformations between rules and description logics, we develop ROWL, a Protege plugin that allows users to enter OWL axioms by way of rules; the plugin then automatically converts these rules into OWL DL axioms if possible, and prompts the user in case such a conversion is not possible without weakening the semantics of the rule. + + + + + Modeling OWL with Rules: The ROWL Protege Plugin + + + + + + + + + + Markus Krötzsch + + + + b7e14a29e6de4cc64fee9080f4e4db77a4226769 + Markus Krötzsch + + + Markus Krötzsch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + + Working process quantification in factory using wearable sensor device and ontology-based stream data processing + 2016-10-19T21:00:00 + Working process quantification in factory using wearable sensor device and ontology-based stream data processing + + 2016-10-19T21:00:00 + Masao Watanabe, Kazunari Hashimoto, Seiya Inagi, Yohei Yamane, Seiji Suzuki and Hiroshi Umemoto + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + Gulbenkian Science Institute + + + + Gulbenkian Science Institute + + + + + Gulbenkian Science Institute + + + Yutaka Mitsuishi + + + + + + + + + + + 55ac3c4c8c976c25d3ca0660b8dc64bc263d32e7 + + Yutaka Mitsuishi + Yutaka Mitsuishi + + + + + + + + + + Alberto Tonon + Alberto Tonon + + + Alberto Tonon + + + 3b41efd1acf36406793166b43659b542c431776f + + + + Spyros Kotoulas + Spyros Kotoulas + + + + + + Spyros Kotoulas + + e879e287903caecdd41354eb5ae7aff6d9bc741b + + + + + + + + 1e42eb62ecf4ba2ca58a7d728117cc1458d12f4d + + + + Martin Rezk + Martin Rezk + + + + + Martin Rezk + + + + + + + + + Paramita Mirza + + + + 67eff0bc075f39ef28dfed00e58d3a115350ce0d + Paramita Mirza + + + + + + Paramita Mirza + + + + + + + + + + + + + + + 2016-10-20T10:50:00 + + + 2016-10-20T10:30:00 + Armen 
Inants, Manuel Atencia and Jérôme Euzenat + 2016-10-20T10:50:00 + + Algebraic calculi for weighted ontology alignments + 2016-10-20T10:50:00 + Algebraic calculi for weighted ontology alignments + 2016-10-20T10:50:00 + 2016-10-20T10:30:00 + + + + + + + Tessel Bogaard + + + + + + + + Tessel Bogaard + Tessel Bogaard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + A RDF based Portal of Biological Phenotype Data produced in Japan + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + A RDF based Portal of Biological Phenotype Data produced in Japan + Terue Takatsuki, Mikako Saito, Sadahiro Kumagai, Eiki Takayama, Kazuya Ohshima, Nozomu Ohshiro, Kai Lenz, Nobuhiko Tanaka, Norio Kobayashi and Hiroshi Masuya + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + Word embedding + + + + Cross-Language Record Linkage using Word Embedding driven Metadata Similarity Measurement + + Cross-Language Record Linkage using Word Embedding driven Metadata Similarity Measurement + + + + + Cross-Language Record Linkage using Word Embedding driven Metadata Similarity Measurement + + + Semantic matching + Aiming to link the records that refer to the same entity across multiple databases in different languages, we address the mismatches of wordings between literal translations of metadata in source language and metadata in target language, which cannot be calculated by string-based measures. In this paper, we propose a method based on word embedding, which can capture the semantic similarity relationships among words. The effectiveness of this method is confirmed in linking the same records between Ukiyo-e (Japanese traditional woodblock printing) databases in Japanese and English. This method could be applied to other languages since it makes little assumption about languages. 
+ Cross-language record linkage + + Aiming to link the records that refer to the same entity across multiple databases in different languages, we address the mismatches of wordings between literal translations of metadata in source language and metadata in target language, which cannot be calculated by string-based measures. In this paper, we propose a method based on word embedding, which can capture the semantic similarity relationships among words. The effectiveness of this method is confirmed in linking the same records between Ukiyo-e (Japanese traditional woodblock printing) databases in Japanese and English. This method could be applied to other languages since it makes little assumption about languages. + Word embedding + + Semantic matching + Similarity measurement + Similarity measurement + + + + Cross-language record linkage + + + + + + Social Meaning + According to Semantic Web standards, IRIs are individual +constants or predicate letters whose names are chosen arbitrarily +and carry no formal meaning. At the same time it is a well-known +aspect of Semantic Web pragmatics that IRIs are often constructed +mnemonically, in order to be meaningful to a human interpreter. +The latter has traditionally been termed 'Social Meaning', a +concept that has been discussed but not yet quantitatively +studied by the Semantic Web community. + +In this paper we use statistical model learning as a method to +quantify the meaning that is (at least) encoded in Semantic Web +names, We implement the approach and evaluate it over hundreds of +thousands of data sets in order to illustrate its efficacy. Our +experiments confirm that many Semantic Web names are indeed +meaningful and, more interestingly, we provide a quantitative +lower bound on how much meaning is (at least) encoded in names on +a per-dataset basis. 
+ +To our knowledge, this is the first paper about the interaction +between social and formal meaning, as well as the first paper +that uses statistical model learning as a method to quantify +meaning in the Semantic Web context. + + + According to Semantic Web standards, IRIs are individual +constants or predicate letters whose names are chosen arbitrarily +and carry no formal meaning. At the same time it is a well-known +aspect of Semantic Web pragmatics that IRIs are often constructed +mnemonically, in order to be meaningful to a human interpreter. +The latter has traditionally been termed 'Social Meaning', a +concept that has been discussed but not yet quantitatively +studied by the Semantic Web community. + +In this paper we use statistical model learning as a method to +quantify the meaning that is (at least) encoded in Semantic Web +names, We implement the approach and evaluate it over hundreds of +thousands of data sets in order to illustrate its efficacy. Our +experiments confirm that many Semantic Web names are indeed +meaningful and, more interestingly, we provide a quantitative +lower bound on how much meaning is (at least) encoded in names on +a per-dataset basis. + +To our knowledge, this is the first paper about the interaction +between social and formal meaning, as well as the first paper +that uses statistical model learning as a method to quantify +meaning in the Semantic Web context. + + + + + + Social Meaning + + Semantics + + + + + + + Are Names Meaningful? Quantifying Social Meaning on the Semantic Web + Are Names Meaningful? Quantifying Social Meaning on the Semantic Web + Are Names Meaningful? 
Quantifying Social Meaning on the Semantic Web + + + Information Compression + + + + Information Compression + + + Semantics + + + + + + + + + + + + + + + + + + + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:30:00 + Robert Piro, Ian Horrocks, Peter Hendler, Yavor Nenov, Boris Motik, Michael Rossman and Scott Kimberly + Semantic Technologies for Data Analysis in Health Care + 2016-10-21T14:30:00 + Semantic Technologies for Data Analysis in Health Care + + + 2016-10-21T14:50:00 + + + + + + + + + + + Harald Eisenmann + + + a38f1850e7a622420ed1efd4c981c50c498236ca + Harald Eisenmann + + Harald Eisenmann + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Thomas Rebele + Thomas Rebele + + + + + + + + + Thomas Rebele + + f32adc2de92c61cbdd9827bf8cf7ec11ab59adf9 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Calabria + + + + + + University of Calabria + + + University of Calabria + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + + A Protege Plugin with Swift Linked Data Miner + Jędrzej Potoniec and Agnieszka Ławrynowicz + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + A Protege Plugin with Swift Linked Data Miner + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T10:30:00 + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + 2016-10-21T10:30:00 + Natural Language Processing + Natural Language Processing + 2016-10-21T11:50:00 + + + + Jörg Waitelonis + Jörg Waitelonis + + + + + Jörg Waitelonis + 8d447b7f5197ba83c5a83212d5b40bb3e7022caf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Univ. Rennes 1 and INRIA + + + + Univ. Rennes 1 and INRIA + + + + + Univ. 
Rennes 1 and INRIA + + + + + + + + + + + + + + + + + + + + + + + + Alo Allik, Mariano Mora-Mcginity, Gyorgy Fazekas and Mark Sandler + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + MusicWeb: music discovery with open linked semantic metadata + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + MusicWeb: music discovery with open linked semantic metadata + + + + + + + 393b51a0cc241618aa88bf7821c6def528afe7da + Werner Nutt + + + + Werner Nutt + + + + + Werner Nutt + + + + + + + + + + + + + + + + + + + + + + + 7e4f8b78f38f5fffb33304fc1fd80b43ccc792fa + + Giannis Mouchakis + + + + Giannis Mouchakis + Giannis Mouchakis + + + + + + + + + + + + + + + + + + + EURECOM + + EURECOM + + + + EURECOM + + + + + + + 2016-10-19T15:50:00 + 2016-10-19T17:10:00 + 2016-10-19T17:10:00 + 2016-10-19T17:10:00 + Minute Madness + 2016-10-19T15:50:00 + 2016-10-19T17:10:00 + Minute Madness + + + + Data Scaling + In this paper we present an experimental evaluation of VIG, a data scaler for OBDA benchmarks. Data scaling is a relatively recent approach, proposed in the database community, that allows for quickly scaling an input data instance to s times its size, while preserving certain application-specific characteristics. The advantages of scaling are that the generator is general, in the sense that it can be re-used on different database schemas, and that users are not required to manually input the data characteristics. VIG lifts the scaling approach from the database level to the OBDA level, where the domain information of ontologies and mappings has to be taken into account as well. + To evaluate the quality of VIG, in this paper we use it to generate data for the Berlin SPARQL Benchmark (BSBM), and compare it with the official BSBM data generator. + + + + Data Scaling + Evaluation + + + + + In this paper we present an experimental evaluation of VIG, a data scaler for OBDA benchmarks. 
Data scaling is a relatively recent approach, proposed in the database community, that allows for quickly scaling an input data instance to s times its size, while preserving certain application-specific characteristics. The advantages of scaling are that the generator is general, in the sense that it can be re-used on different database schemas, and that users are not required to manually input the data characteristics. VIG lifts the scaling approach from the database level to the OBDA level, where the domain information of ontologies and mappings has to be taken into account as well. + To evaluate the quality of VIG, in this paper we use it to generate data for the Berlin SPARQL Benchmark (BSBM), and compare it with the official BSBM data generator. + + OBDA Benchmark + An Evaluation of VIG with the BSBM Benchmark + + + + An Evaluation of VIG with the BSBM Benchmark + Evaluation + + + + + OBDA Benchmark + + An Evaluation of VIG with the BSBM Benchmark + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Damien Graux + + + + + + Damien Graux + + + + + + e04634d9f47999644ea729155de6ebac6736a5ef + + Damien Graux + + + + + 9cc447f0f4877bc04e7d6703bf040d7337094e65 + Martin Giese + + + + + Martin Giese + + Martin Giese + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Milano-Bicocca + + University of Milano-Bicocca + + + + + + + + University of Milano-Bicocca + + + + + + + + + + + + + + + + + + + + + + + + + erasmus mobility + + ontology matching + + + Compliance checking of business processes executed by auditors requires to analyze documents e.g. log files, business process models depending on requirements derived from reference guidelines. This paper presents a forward compliance checking application for facilitating conformant behavior by de-tecting organizational operations and their deviations based on these docu-ments in a semantic way. 
This application has been tested on the Internaliza-tion process in the respect of Erasmus mobility. + + + Semantic Audit Application + + + compliance check + + + ontology matching + + + ontology learning + ontology learning + Compliance checking of business processes executed by auditors requires to analyze documents e.g. log files, business process models depending on requirements derived from reference guidelines. This paper presents a forward compliance checking application for facilitating conformant behavior by de-tecting organizational operations and their deviations based on these docu-ments in a semantic way. This application has been tested on the Internaliza-tion process in the respect of Erasmus mobility. + + process ontology + Semantic Audit Application + + erasmus mobility + compliance check + + + process ontology + Semantic Audit Application + + + 2016-10-20T11:30:00 + + 2016-10-20T11:10:00 + 2016-10-20T11:30:00 + 2016-10-20T11:30:00 + 2016-10-20T11:10:00 + User validation in ontology alignment + 2016-10-20T11:30:00 + Zlatan Dragisic, Valentina Ivanova, Patrick Lambrix, Daniel Faria, Ernesto Jiménez-Ruiz and Catia Pesquita + User validation in ontology alignment + + + + + + + + + + + + + + + + + + + + + + Benedikt Kämpgen + + + + + Benedikt Kämpgen + + Benedikt Kämpgen + + + + + 2df16f8869e9f6a6120905b60dd8fdbeb27812b8 + + + + + + + + + + + + + + + ab378c183b37a86f1009f7719e499cb7f6b5f133 + + + + + + + + Gerard de Melo + + + + + Gerard de Melo + Gerard de Melo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Multiple datasets that add high value to biomedical research have been exposed on the web as part of the Life Sciences Linked Open Data (LS-LOD) Cloud. The ability to easily navigate through these datasets is crucial in order to draw meaningful biological co relations. 
However, navigating these multiple datasets is not trivial as most of these are only available as isolated SPARQL endpoints with very little vocabulary reuse. We propose an approach for Autonomous Resource Discovery and Indexing (ARDI), a set of configurable rules which can be used to discover links between biological entities in the LS-LOD cloud. We have catalogued and linked concepts and properties from 137 public SPARQL endpoints. The ARDI is used to dynamically assemble queries retrieving data from multiple SPARQL endpoints simultaneously. + + SPARQL Endpoint + + A - Posteriori Data Integration for Life Sciences + + A - Posteriori Data Integration for Life Sciences + Life Sciences Data + + Multiple datasets that add high value to biomedical research have been exposed on the web as part of the Life Sciences Linked Open Data (LS-LOD) Cloud. The ability to easily navigate through these datasets is crucial in order to draw meaningful biological co relations. However, navigating these multiple datasets is not trivial as most of these are only available as isolated SPARQL endpoints with very little vocabulary reuse. We propose an approach for Autonomous Resource Discovery and Indexing (ARDI), a set of configurable rules which can be used to discover links between biological entities in the LS-LOD cloud. We have catalogued and linked concepts and properties from 137 public SPARQL endpoints. The ARDI is used to dynamically assemble queries retrieving data from multiple SPARQL endpoints simultaneously. 
+ + SPARQL Endpoint + + Life Sciences Data + + Autonomous Resource Discovery and Indexing + + Autonomous Resource Discovery and Indexing + A - Posteriori Data Integration for Life Sciences + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Twitter + A Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis + + + + + Sentiment analysis + A Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis + + SemEval + + Twitter + + Replicate study + + + SemEval + We performed a thorough replicate study of the top systems performing in the yearly SemEval Twitter Sentiment Analysis task. We highlight some differences between the results obtained by the top systems and the ones we are able to compute. We also propose SentiME, an ensemble system composed of 5 state-of-the-art sentiment classifiers. SentiME first trains the different classifiers using the Bootstrap Aggregating Algorithm. The classification results are then aggregated using a linear function that averages the classification distributions of the different classifiers. SentiME has also been tested over the SemEval2015 test set, properly trained with the SemEval2015 train test, outperforming the best ranked system of the challenge. + + + Replicate study + + + A Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis + + We performed a thorough replicate study of the top systems performing in the yearly SemEval Twitter Sentiment Analysis task. We highlight some differences between the results obtained by the top systems and the ones we are able to compute. We also propose SentiME, an ensemble system composed of 5 state-of-the-art sentiment classifiers. SentiME first trains the different classifiers using the Bootstrap Aggregating Algorithm. 
The classification results are then aggregated using a linear function that averages the classification distributions of the different classifiers. SentiME has also been tested over the SemEval2015 test set, properly trained with the SemEval2015 train test, outperforming the best ranked system of the challenge. + + + Sentiment analysis + + + + Improving Open Data Usability through Semantics + With the success of Open Data a huge amount of tabular data become available that could potentially be mapped and linked into the Web of (Linked) Data. The use of semantic web technologies would then allow to explore related content and enhanced search functionalities across data portals. However, existing linkage and labeling approaches mainly rely on mappings of textual information to classes or properties in knowledge bases. In this work we outline methods to recover the semantics of tabular Open Data and to identify related content which allows a mapping and automated integration/categorization of Open Data resources and improves the overall usability and quality of Open Data. + Linked Data + Open Data + + + With the success of Open Data a huge amount of tabular data become available that could potentially be mapped and linked into the Web of (Linked) Data. The use of semantic web technologies would then allow to explore related content and enhanced search functionalities across data portals. However, existing linkage and labeling approaches mainly rely on mappings of textual information to classes or properties in knowledge bases. In this work we outline methods to recover the semantics of tabular Open Data and to identify related content which allows a mapping and automated integration/categorization of Open Data resources and improves the overall usability and quality of Open Data. 
+ semantic table interpretation + table annotation + + semantic labeling + + + Linked Data + semantic labeling + + CSV + + Improving Open Data Usability through Semantics + CSV + Improving Open Data Usability through Semantics + related tables + + + table annotation + Open Data + related tables + + + + + semantic table interpretation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T14:00:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T14:00:00 + Multilinguality + 2016-10-19T15:20:00 + Multilinguality + + + + + + + + + + + + + + + + + + + + + + + + + Stefan Decker + + + + + + + + Stefan Decker + + 845a8211d50232fef2792db868e0d6fd4069e4ee + + + + Stefan Decker + + + f23513ddcad80a088d4976e7054faffb9f921dde + Xiaowang Zhang + + + + + Xiaowang Zhang + + + + + Xiaowang Zhang + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + EnergyUse - A Collaborative Semantic Platform for Monitoring and Discussing Energy Consumption + 2016-10-19T15:00:00 + 2016-10-19T14:40:00 + + + 2016-10-19T15:00:00 + 2016-10-19T15:00:00 + + 2016-10-19T14:40:00 + EnergyUse - A Collaborative Semantic Platform for Monitoring and Discussing Energy Consumption + Gregoire Burel, Lara S. G. 
Piccolo and Harith Alani + 2016-10-19T15:00:00 + + + + + + + Enabling combined software and data engineering: the ALIGNED suite of ontologies + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Monika Solanki + Enabling combined software and data engineering: the ALIGNED suite of ontologies + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + Riccardo Rosati + Riccardo Rosati + + + + + + + + + + + Riccardo Rosati + 347c5f7b49d6802f343668845c30c5e610d28a7f + + + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Nandana Mihindukulasooriya, Esteban Gonzalez, Fernando Serena, Carlos Badenes and Oscar Corcho + FarolApp: Live Linked Data on Light Pollution + + FarolApp: Live Linked Data on Light Pollution + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + OpenCitations + + + Citation Database + + Jailbreaking your reference lists: the OpenCitations strike again + Jailbreaking your reference lists: the OpenCitations strike again + + Citation Database + + In this poster paper we provide an overview of the OpenCitations project and of its main outcome, the OpenCitations Corpus, which is an open repository of scholarly citation data made available under a Creative Commons public domain dedication, which provides in RDF accurate citation information harvested from the scholarly literature. + + Jailbreaking your reference lists: the OpenCitations strike again + Semantic Publishing + + Scholarly Communication + + In this poster paper we provide an overview of the OpenCitations project and of its main outcome, the OpenCitations Corpus, which is an open repository of scholarly citation data made available under a Creative Commons public domain dedication, which provides in RDF accurate citation information harvested from the scholarly literature. 
+ + Scholarly Communication + OpenCitations Corpus + + + + Semantic Publishing + + + + OpenCitations Corpus + + + OpenCitations + + + + + + + + + + + Shanghai Hi-knowledge Information Technology Corporation + Shanghai Hi-knowledge Information Technology Corporation + + Shanghai Hi-knowledge Information Technology Corporation + + + + + + Daisuke Horyu + Daisuke Horyu + + + + Daisuke Horyu + + + + bbc0aae11be589070701435792fc955e18dd8a4b + + + + + + + + + Bijan Parsia + + + + + ce8a053681800c2b8b8a5a181b7ca984caad8163 + + + + Bijan Parsia + Bijan Parsia + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + + Hong Fang and Xiaowang Zhang + 2016-10-19T21:00:00 + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + + 2016-10-19T21:00:00 + + + + + + + + + + + + + Biligsaikhan Batjargal + Biligsaikhan Batjargal + + + + + fd3a79d38afdd0918dca809283156908e404549f + + Biligsaikhan Batjargal + + + + + + + + + + + + Joao Paulo Almeida + + 36a6eaf225efb85cfc3090f065638706dee3e996 + + + + + + + + + Joao Paulo Almeida + Joao Paulo Almeida + + + + + + + + + + + + + + + + + consistency of criteria for classification + + evaluation + Ontology Refinement and Evaluation System based on Similarity of Is-a Hierarchies + Ontologies are constructed in various fields such as medical information, mechanical design, and etc. It is important to build high quality ontologies so that these ontologies are used as knowledge bases and knowledge models for application systems. However it is hard to build good quality ontologies because of the necessity of both knowledge of ontology and expertise in their target domain. For this background, ontology construction and refinement costs a lot of time and ef-fort. In order to reduce such costs, we develop an ontology refinement support system. This system have two main function. 
First, the system can detect points that should be refined and propose how to refine it. Second, the system can evaluate ontologies quantitatively. This system indicate how ontologies are consistent in a classificatory criterion. To develop the refinement support system, we focus on a guideline for building well-organized ontologies that “Each subclass of a super class is distinguished by the values of exactly one attribute of the super class”. When an ontology is built following this guideline, there is similarity among Is-a hierarchies. We use these similar Is-a hierarchies and develop an ontology refinement system. + + + Ontology Refinement and Evaluation System based on Similarity of Is-a Hierarchies + + + refinement + + refinement + evaluation + + + Ontologies are constructed in various fields such as medical information, mechanical design, and etc. It is important to build high quality ontologies so that these ontologies are used as knowledge bases and knowledge models for application systems. However it is hard to build good quality ontologies because of the necessity of both knowledge of ontology and expertise in their target domain. For this background, ontology construction and refinement costs a lot of time and ef-fort. In order to reduce such costs, we develop an ontology refinement support system. This system have two main function. First, the system can detect points that should be refined and propose how to refine it. Second, the system can evaluate ontologies quantitatively. This system indicate how ontologies are consistent in a classificatory criterion. To develop the refinement support system, we focus on a guideline for building well-organized ontologies that “Each subclass of a super class is distinguished by the values of exactly one attribute of the super class”. When an ontology is built following this guideline, there is similarity among Is-a hierarchies. We use these similar Is-a hierarchies and develop an ontology refinement system. 
+ consistency of criteria for classification + + + Ontology Refinement and Evaluation System based on Similarity of Is-a Hierarchies + + ontology + + ontology + + + + + + + + + The W3C RDF Stream Processing (RSP) community has proposed both a common model and a language for querying RDF streams. However, the current implementations of RSP systems are significantly different from each other in terms of performance. In this paper, we propose a unified interface for optimizing a continuous query in heterogeneous RSP systems. To enhance the performance of RSP, the unified interface decomposes query, reassembles partial queries and assigns them to appropriate RSP systems. Experimental results show that the proposed approach performances better in terms of memory consumption and latency. + + + RSP system + + + Unified query interface + RSP system + RDF stream processing + Unified query interface + A Unified Interface for Optimizing Continuous Query in Heterogeneous RDF Stream Processing Systems + + + A Unified Interface for Optimizing Continuous Query in Heterogeneous RDF Stream Processing Systems + + + + + + A Unified Interface for Optimizing Continuous Query in Heterogeneous RDF Stream Processing Systems + + + + RDF stream processing + + + The W3C RDF Stream Processing (RSP) community has proposed both a common model and a language for querying RDF streams. However, the current implementations of RSP systems are significantly different from each other in terms of performance. In this paper, we propose a unified interface for optimizing a continuous query in heterogeneous RSP systems. To enhance the performance of RSP, the unified interface decomposes query, reassembles partial queries and assigns them to appropriate RSP systems. Experimental results show that the proposed approach performances better in terms of memory consumption and latency. 
+ + + + + + + + + + + + + + + + + George Gkotsis + + + + + George Gkotsis + + + b71e3ac43f0cbebb962e2233f1d66d72701cf027 + George Gkotsis + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T18:00:00 + Zhenyu Song, Xiaowang Zhang and Zhiyong Feng + PRONA: A Plugin for Well-Designed Approximate Queries in Jena + + PRONA: A Plugin for Well-Designed Approximate Queries in Jena + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + + Fondazione Bruno Kessler + + + + + + + Fondazione Bruno Kessler + + + + Fondazione Bruno Kessler + + + + + + + + + + + + + + + + + + + + + + + + + + + + Simen Heggestøyl + + Simen Heggestøyl + + + + + + + + Simen Heggestøyl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Takeshi Masuda + Takeshi Masuda + Takeshi Masuda + + 794ceb2d4c134124d418b0c3c6b3159f0f3c74b6 + + + + + + + + + + + + + + + + + + + + + + + + + Yousra Chabchoub + + + Yousra Chabchoub + + 9de082395a861352f1b8880591b2a37243fcc040 + + + Yousra Chabchoub + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + CWI + + + + CWI + CWI + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + DBpedia Entity Type Inference Using Categories + + + + Category + DBpedia Entity Type Inference Using Categories + + + + + + Category + + Type Inference + + In this paper, we investigate how to identify entity type based on entity cat-egory information. In particular, we first calculate the statistical distribution of each category over all the types. And then we generate type candidates according to distribution probability. Finally we identify the correct type ac-cording to distribution probability, keywords in category and abstract. To evaluate the effectiveness of the approach, we conduct preliminary experi-ments on a real-world dataset from DBpedia. 
Experimental results indicate that our approach is effective in identifying entity types. + DBpedia + + + DBpedia Entity Type Inference Using Categories + In this paper, we investigate how to identify entity type based on entity cat-egory information. In particular, we first calculate the statistical distribution of each category over all the types. And then we generate type candidates according to distribution probability. Finally we identify the correct type ac-cording to distribution probability, keywords in category and abstract. To evaluate the effectiveness of the approach, we conduct preliminary experi-ments on a real-world dataset from DBpedia. Experimental results indicate that our approach is effective in identifying entity types. + Type Inference + + + DBpedia + + + + + + a7196b0a3b229d1659a5c85c9b48269cd7fd59ce + + + + + + Stefano Faralli + + Stefano Faralli + + + + + + Stefano Faralli + + + + + + + + + + + + Jiewen Wu + Jiewen Wu + Jiewen Wu + + + 3bed06bddf50be6302110aa1a143b58967a0da01 + + + + + + + + + + + + + + + + + + + + + + Raf Buyle + 30e38b8343a31860010a7094c693582569f4644d + Raf Buyle + + Raf Buyle + + + + + + + + + + + + Qaiser Mehmood + + bd06c55389c4f62fbca9dfa5d3e7b4b110dacd8e + + Qaiser Mehmood + + + + + Qaiser Mehmood + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andreas Steigmiller + + + + + + 0f010f0a40d013cd22f5b6573d0599271876826c + + Andreas Steigmiller + + + Andreas Steigmiller + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Building and Exploring National-wide Enterprise Knowledge Graphs for Investment Analysis in an Incremental Way + + 2016-10-19T11:40:00 + Tong Ruan, Lijuan Xue, Haofen Wang, Fanghuai Hu, Liang Zhao and Jun Ding + 2016-10-19T11:40:00 + 2016-10-19T11:20:00 + Building and Exploring National-wide Enterprise Knowledge Graphs for 
Investment Analysis in an Incremental Way + + + 2016-10-19T11:40:00 + 2016-10-19T11:40:00 + 2016-10-19T11:20:00 + + + + + + + + Victor Christen + + + Victor Christen + + 6a2471cdeab13902eb9650167b3bd8247f51b5fc + + + + Victor Christen + + + PRINTEPS + ROS + + + PRINTEPS + + We have developed PRactical INTElligent aPplicationS (PRINTEPS) which is a platform for developing comprehensive intelligence applications. This paper introduces an application of PRINTEPS for customer reception service in robot cafe by using stream reasoning and Robot Operating System (ROS) based on PRINTEPS, and for integrating image sensing with knowledge processing. Based on this platform, we demonstrate that the behaviors of a robot in a robot cafe can be modified by changing the applicable rule sets. + Implementing Customer Reception Service in Robot Cafe using Stream Reasoning and ROS based on PRINTEPS + + + + Stream Reasoning + + We have developed PRactical INTElligent aPplicationS (PRINTEPS) which is a platform for developing comprehensive intelligence applications. This paper introduces an application of PRINTEPS for customer reception service in robot cafe by using stream reasoning and Robot Operating System (ROS) based on PRINTEPS, and for integrating image sensing with knowledge processing. Based on this platform, we demonstrate that the behaviors of a robot in a robot cafe can be modified by changing the applicable rule sets. 
+ SWRL + + + ROS + + + + + + + Stream Reasoning + Implementing Customer Reception Service in Robot Cafe using Stream Reasoning and ROS based on PRINTEPS + + + Implementing Customer Reception Service in Robot Cafe using Stream Reasoning and ROS based on PRINTEPS + + SWRL + + + + + + + + + + + + + + + + + Aalborg University + + Aalborg University + Aalborg University + + + + + + + + + + + + + + + + + + + + + + + + Mikako Saito + + + + + + Mikako Saito + a0636d694231bfca5a3a958eb96b271b84120584 + Mikako Saito + + + + + + Julien Subercaze + + Julien Subercaze + + + + + + + + + + + Julien Subercaze + + + 0b0b215b56f9139ca2bacc806ea8703f88d79add + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kavitha Srinivas + + + + 3524dd31de79a544b21dd104327c3c7eb9a9f139 + Kavitha Srinivas + + + + + + Kavitha Srinivas + + + + + + + + + + + + + + 2016-10-20T11:30:00 + Linhong Zhu, Majid Ghasemi-Gol, Pedro Szekely, Aram Galstyan and Craig Knoblock + Unsupervised Entity Resolution on Multi-type Graphs + Unsupervised Entity Resolution on Multi-type Graphs + 2016-10-20T11:30:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + + + 2016-10-20T11:50:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tuan-Dung Cao + + + Tuan-Dung Cao + b3b18b2d2b5de289ce8ed2386460465e7f3f1a5e + + + + + Tuan-Dung Cao + + + + + + + + + + + + + + + + + + + + + + + + + + + Knowledge Media Institute, The Open University + + + + + + + + Knowledge Media Institute, The Open University + + Knowledge Media Institute, The Open University + + + + c085a1f84c6c5690bd9e12a8b22016d2df360931 + + Angelo Antonio Salatino + Angelo Antonio Salatino + + + + + + + + Angelo Antonio Salatino + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Muhammad Amith + + + + 223fc2556836e04f425c459b7f86a86c2399ed1c + Muhammad Amith + Muhammad Amith + + + + + + + + + + + + + + 279b9d72c9d959d4f93219e45a9d41fc980ad9b9 + 
Simon Razniewski + + + + + + Simon Razniewski + Simon Razniewski + + + + + + + + + + + + + + + + + + + + + + + + + e86fa597c65874b942cb26a8f63d966084dde342 + + + Veronika Thost + + + + + + Veronika Thost + + + + Veronika Thost + + + + 2016-10-20T13:50:00 + Are Names Meaningful? Quantifying Social Meaning on the Semantic Web + 2016-10-20T13:30:00 + 2016-10-20T13:30:00 + Are Names Meaningful? Quantifying Social Meaning on the Semantic Web + Steven de Rooij, Wouter Beek, Stefan Schlobach and Frank Van Harmelen + 2016-10-20T13:50:00 + 2016-10-20T13:50:00 + + + 2016-10-20T13:50:00 + + + + + + EPFL + + EPFL + + + + + EPFL + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Real-time analytics that requires integration and aggregation of heterogeneous and distributed streaming and static data is a typical task in many industrial scenarios such as diagnostics of turbines in Siemens. OBDA approach has a great potential to facilitate such tasks; however, it has a number of limitations in dealing with analytics that restrict its use in important industrial applications. Based on our experience with Siemens, we argue that in order to overcome those limitations OBDA should be extended and become analytics, source, and cost aware. In this work we propose such an extension. In particular, we propose an ontology, mapping, and query language for OBDA, where aggregate and other analytical functions are first class citizens. Moreover, we develop query optimisation techniques that allow to efficiently process analytical tasks over static and streaming data. We implement our approach in a system and evaluate our system with Siemens turbine data. 
+ + + Towards Analytics Aware Ontology Based Access to Static and Streaming Data + Analytics + + OBDA + + Towards Analytics Aware Ontology Based Access to Static and Streaming Data + Diagnostics + + + + + + Towards Analytics Aware Ontology Based Access to Static and Streaming Data + + + + + + + + + + + OBDA + + Diagnostics + + + + Analytics + Real-time analytics that requires integration and aggregation of heterogeneous and distributed streaming and static data is a typical task in many industrial scenarios such as diagnostics of turbines in Siemens. OBDA approach has a great potential to facilitate such tasks; however, it has a number of limitations in dealing with analytics that restrict its use in important industrial applications. Based on our experience with Siemens, we argue that in order to overcome those limitations OBDA should be extended and become analytics, source, and cost aware. In this work we propose such an extension. In particular, we propose an ontology, mapping, and query language for OBDA, where aggregate and other analytical functions are first class citizens. Moreover, we develop query optimisation techniques that allow to efficiently process analytical tasks over static and streaming data. We implement our approach in a system and evaluate our system with Siemens turbine data. 
+ Static Data + + + Description Logic + + + + + + + Description Logic + + + Static Data + Streaming Data + Streaming Data + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Institute for High Performance Computing and Networking (ICAR-CNR) + Institute for High Performance Computing and Networking (ICAR-CNR) + Institute for High Performance Computing and Networking (ICAR-CNR) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ross Stirling + + + Ross Stirling + + + + 8569f7e40415c10354c13ee042d67273826913f8 + + + Ross Stirling + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + OWL coverage + A variety of tools for visualizing, editing, and documenting OWL ontologies have been developed in the last couple of years. The OWL coverage and conformance of these tools usually needs to be tested during development or for evaluation and comparison purposes. However, in particular for the testing of special OWL concepts and concept combinations, it can be tedious to find suitable ontologies and test cases. We have developed OntoBench, a generator for OWL 2 benchmark ontologies that can be used to test and compare ontology visualizers and related tools. In contrast to existing OWL benchmarks, OntoBench does not focus on scalability and performance but OWL coverage and concept combinations. Consistent benchmark ontologies are dynamically generated based on OWL 2 language constructs selected in a graphical user interface. OntoBench is available on GitHub and as a public service, making it easy to use the tool and generate custom ontologies or ontology fragments. + benchmark + ontology + + + A variety of tools for visualizing, editing, and documenting OWL ontologies have been developed in the last couple of years. 
The OWL coverage and conformance of these tools usually needs to be tested during development or for evaluation and comparison purposes. However, in particular for the testing of special OWL concepts and concept combinations, it can be tedious to find suitable ontologies and test cases. We have developed OntoBench, a generator for OWL 2 benchmark ontologies that can be used to test and compare ontology visualizers and related tools. In contrast to existing OWL benchmarks, OntoBench does not focus on scalability and performance but OWL coverage and concept combinations. Consistent benchmark ontologies are dynamically generated based on OWL 2 language constructs selected in a graphical user interface. OntoBench is available on GitHub and as a public service, making it easy to use the tool and generate custom ontologies or ontology fragments. + OntoBench: Generating Custom OWL 2 Benchmark Ontologies + + + + + OntoBench: Generating Custom OWL 2 Benchmark Ontologies + OWL coverage + + + documentation + OWL 2 + + OWL 2 + visualization + benchmark + ontology + + documentation + + + + + OntoBench: Generating Custom OWL 2 Benchmark Ontologies + visualization + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Silvio Peroni + + + Silvio Peroni + e813f1989c5b90bd05bc71b3a7bb62e3df558122 + + + + + Silvio Peroni + + + + + + + 2016-10-21T13:50:00 + Integrating medical scientific knowledge with the semantically Quantified Self + 2016-10-21T13:50:00 + + Integrating medical scientific knowledge with the semantically Quantified Self + 2016-10-21T13:50:00 + 2016-10-21T13:50:00 + 2016-10-21T13:30:00 + + + 2016-10-21T13:30:00 + Allan Third, George Gkotsis, Eleni Kaldoudi, George Drosatos, Nick Portokallidis, Stefanos Roumeliotis, Kalliopi Pafilis and John Domingue + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + Data Integration + + VoldemortKG: Mapping Schema.org Entities to Linked Open Data + schema.org + + + + VoldemortKG: Mapping Schema.org Entities to Linked Open Data + VoldemortKG: Mapping Schema.org Entities to Linked Open Data + Dataset + Dataset + + + + + + Data Integration + Increasingly, Web pages mix entities coming from different sources and represented in several different ways. It can thus happen that the same entity is both described by using schema.org annotations and by creating a text anchor pointing to its Wikipedia page. Often, those representations provide complementary information which is not exploited since those entities are disjoint. + +In this project, we explore the extent to which entities represented in different ways repeat on the Web, how they are related, and how they complement (or link) to each other. Our initial experiments show that we can unveil a previously (unexploited) knowledge graph by applying simple instance matching techniques on a large collection of schema.org annotations and DBpedia. The resulting knowledge graph aggregates entities (often tail entities) scattered across several Web pages, and complements existing DBpedia entities with new facts and properties. + +In order to facilitate further investigations in how to mine such information, we are releasing i) an excerpt of all CommonCrawl web pages containing both Wikipedia and schema.org annotations, ii) the toolset to extract this information and perform knowledge graph construction and mapping onto DBpedia, as well as iii) the resulting knowledge graph (VoldemortKG) obtained via label matching techniques. + + Knowledge Graphs + + + Instance Matching + + + Knowledge Graphs + + + Instance Matching + schema.org + Increasingly, Web pages mix entities coming from different sources and represented in several different ways. 
It can thus happen that the same entity is both described by using schema.org annotations and by creating a text anchor pointing to its Wikipedia page. Often, those representations provide complementary information which is not exploited since those entities are disjoint. + +In this project, we explore the extent to which entities represented in different ways repeat on the Web, how they are related, and how they complement (or link) to each other. Our initial experiments show that we can unveil a previously (unexploited) knowledge graph by applying simple instance matching techniques on a large collection of schema.org annotations and DBpedia. The resulting knowledge graph aggregates entities (often tail entities) scattered across several Web pages, and complements existing DBpedia entities with new facts and properties. + +In order to facilitate further investigations in how to mine such information, we are releasing i) an excerpt of all CommonCrawl web pages containing both Wikipedia and schema.org annotations, ii) the toolset to extract this information and perform knowledge graph construction and mapping onto DBpedia, as well as iii) the resulting knowledge graph (VoldemortKG) obtained via label matching techniques. 
+ + + + + + + + + + + + + + + + + 0651939447753f025b79b237ca503607e5a97f93 + + + + + + Nobuhiko Tanaka + + + + + Nobuhiko Tanaka + + Nobuhiko Tanaka + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + integration + + + + + + + Linked Sensor Data Generation using Queryable RML Mappings + Linked Sensor Data Generation using Queryable RML Mappings + application + demo + RML + + Linked Sensor Data Generation using Queryable RML Mappings + As the amount of generated sensor data is increasing, semantic interoperability becomes an important aspect in order to support efficient data distribution and communication. Therefore, the integration and fusion of (sensor) data is important, as this data is coming from different data sources and might be in different formats. Furthermore, reusable and extensible methods for this integration and fusion are required in order to be able to scale with the growing number of applications that generate semantic sensor data. Current research efforts allow to map sensor data to Linked Data in order to provide semantic interoperability. However, they lack support for multiple data sources, hampering the integration and fusion. Furthermore, the used methods are not available for reuse or are not extensible, which hampers the development of applications. In this paper, we describe how the RDF Mapping Language (RML) and a Triple Pattern Fragments (TPF) server are used to address these shortcomings. %define reusable and extensible mappings to generate Linked Data based on heterogeneous (sensor) data. The demonstration consists of a micro controller that generates sensor data. 
The data is captured and mapped to RDF triples using module-specific RML mappings, which are queried from a TPF server. + + As the amount of generated sensor data is increasing, semantic interoperability becomes an important aspect in order to support efficient data distribution and communication. Therefore, the integration and fusion of (sensor) data is important, as this data is coming from different data sources and might be in different formats. Furthermore, reusable and extensible methods for this integration and fusion are required in order to be able to scale with the growing number of applications that generate semantic sensor data. Current research efforts allow to map sensor data to Linked Data in order to provide semantic interoperability. However, they lack support for multiple data sources, hampering the integration and fusion. Furthermore, the used methods are not available for reuse or are not extensible, which hampers the development of applications. In this paper, we describe how the RDF Mapping Language (RML) and a Triple Pattern Fragments (TPF) server are used to address these shortcomings. %define reusable and extensible mappings to generate Linked Data based on heterogeneous (sensor) data. The demonstration consists of a micro controller that generates sensor data. The data is captured and mapped to RDF triples using module-specific RML mappings, which are queried from a TPF server. 
+ Linked Sensor Data + Linked Sensor Data + + + + + + integration + + TPF + demo + + + RML + TPF + application + + + + + + Michał Blinkiewicz + Michał Blinkiewicz + e514fe646a5d9a1ffd3a7d5aa995b9c5df1f1f4e + Michał Blinkiewicz + + + + + + + + + + + + + + + + + + + + + + INRIA + + INRIA + + + + + + + + INRIA + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T11:00:00 + 2016-10-18T12:00:00 + 2016-10-18T12:00:00 + 2016-10-18T12:00:00 + 2016-10-18T11:00:00 + Session 1 + 2016-10-18T12:00:00 + Session 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Stefan Negru + + + + + + + Stefan Negru + + 79958c6b5a6f44ea665c85179a17542cefb2cb46 + Stefan Negru + + + + + + + + + + + + Jorge Gracia + + 49a8499c1bb33dacd1076662488f85055c22dad2 + + + Jorge Gracia + + Jorge Gracia + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + QuerioDALI: Question Answering over Dynamic and Linked Knowledge Graphs + Knowledge Graphs + + QuerioDALI: Question Answering over Dynamic and Linked Knowledge Graphs + Question Answering + + + + + + We present a domain-agnostic system for Question Answering over multiple semi-structured and possibly linked datasets without the need of a training corpus. The system is motivated by an industry use-case where Enterprise Data needs to be combined with a large body of Open Data to fulfill information needs not satisfied by prescribed application data models. Our proposed Question Answering pipeline combines existing components with novel methods to perform, in turn, linguistic analysis of a query, named entity extraction, entity / graph search, fusion and ranking of possible answers. We evaluate QuerioDALI with two open-domain benchmarks and a biomedical one over Linked Open Data sources, and show that our system produces comparable results to systems that require training data and are domain-dependent. 
In addition, we analyze the current challenges and shortcomings. + + + + + Linked Data + QuerioDALI: Question Answering over Dynamic and Linked Knowledge Graphs + Linked Data + + We present a domain-agnostic system for Question Answering over multiple semi-structured and possibly linked datasets without the need of a training corpus. The system is motivated by an industry use-case where Enterprise Data needs to be combined with a large body of Open Data to fulfill information needs not satisfied by prescribed application data models. Our proposed Question Answering pipeline combines existing components with novel methods to perform, in turn, linguistic analysis of a query, named entity extraction, entity / graph search, fusion and ranking of possible answers. We evaluate QuerioDALI with two open-domain benchmarks and a biomedical one over Linked Open Data sources, and show that our system produces comparable results to systems that require training data and are domain-dependent. In addition, we analyze the current challenges and shortcomings. 
+ Knowledge Graphs + + Question Answering + + + + + + + + + + Anisa Rula + + + + + + + + Anisa Rula + Anisa Rula + + + + + + + + + + + + + + + + + + + + + 2016-10-18T14:00:00 + 2016-10-18T15:15:00 + 2016-10-18T15:15:00 + 2016-10-18T15:15:00 + 2016-10-18T14:00:00 + Session 2 + Session 2 + 2016-10-18T15:15:00 + + + Giuseppe Pirrò + 8be175055825920c261d839f3b5a3c3b8a6b14a1 + + + Giuseppe Pirrò + + + + + + Giuseppe Pirrò + + + + + + + + + + + + 958555a51b4260d27615aabe33fb2e8d8c44e6d6 + Anika Groß + + + + + + Anika Groß + + Anika Groß + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Amit Joshi + + + Amit Joshi + + + + + Amit Joshi + 56f92704925eaf91a067a1961518cc4bf98cb3e3 + + + + + + + + + + Lowering knowledge : Making constrained devices semantically interoperable + + + + IoT data management + Data enrichment + Data enrichment + Semantic schema mapping + + Semantic interoperability is an issue in heterogeneous IoT systems. The limited processing power and memory storage of constrained IoT nodes prevents them from handling enriched data. This paper proposes a method to lower complex knowledge representations into simpler structured data, based on the reuse of lifting mappings from data schemas to semantic models. + + + Lowering knowledge : Making constrained devices semantically interoperable + Lowering knowledge : Making constrained devices semantically interoperable + + Semantic interoperability is an issue in heterogeneous IoT systems. The limited processing power and memory storage of constrained IoT nodes prevents them from handling enriched data. This paper proposes a method to lower complex knowledge representations into simpler structured data, based on the reuse of lifting mappings from data schemas to semantic models. 
+ Knowledge lowering + + IoT data management + + Knowledge lowering + + + Semantic schema mapping + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T16:00:00 + 2016-10-18T17:00:00 + 2016-10-18T17:00:00 + 2016-10-18T17:00:00 + 2016-10-18T17:00:00 + Session 3 + Session 3 + 2016-10-18T16:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Department of Computer Science & Engineering, East China University of Science and Technology + + Department of Computer Science & Engineering, East China University of Science and Technology + + Department of Computer Science & Engineering, East China University of Science and Technology + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Personalized robot interactions to intercept behavioral disturbances of people with dementia + + + + + Dementia + + + + People with Dementia (PwD) exhibit Behavioral Disturbances (BD) that can be alleviated by personalized interactions, revisiting memories and promoting comfort and quality of life. However, caregivers are unable to spend a lot of time on these interactions. This work-in-progress poster details the design and deployment of a semantic Internet of Robotic Things (IoRT) platform that enables personalized interactions of a robot with a PwD to reduce and intercept BDs. + + Dementia + Personalized robot interactions to intercept behavioral disturbances of people with dementia + + + + + eHealth + + + Robotics + + + Internet of Robotic Things (IoRT) + + + + People with Dementia (PwD) exhibit Behavioral Disturbances (BD) that can be alleviated by personalized interactions, revisiting memories and promoting comfort and quality of life. However, caregivers are unable to spend a lot of time on these interactions. 
This work-in-progress poster details the design and deployment of a semantic Internet of Robotic Things (IoRT) platform that enables personalized interactions of a robot with a PwD to reduce and intercept BDs. + + + + + Behavioral Disturbance + + + + eHealth + Robotics + + Personalized robot interactions to intercept behavioral disturbances of people with dementia + Internet of Robotic Things (IoRT) + + + + Behavioral Disturbance + + + + + + + + + + + + + + + + + + + + + + + + + + + Cristina Feier + + + + + + Cristina Feier + + Cristina Feier + + + + 362a9cf0719836e44b681ddca183db628432d6e5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + One focus of Semantic Technologies are formalisms that allow to express complex properties of and relationships between classes of data. The declarative nature of these formalisms is close to natural language and human conceptualisation and thus Semantic Technologies enjoy increasing popularity in scenarios where traditional solutions lead to very convoluted procedures which are difficult to maintain and whose correctness is difficult to judge. +A fruitful application of Semantic Technologies in the field of health care data analysis has emerged from the collaboration between Oxford and Kaiser Permanente a US health care provider (HMO). US HMOs have to annually deliver measurement results on their quality of care to US authorities. One of these sets of measurements is defined in a specification called HEDIS which is infamous amongst data analysts for its complexity. Traditional solutions with either SAS-programs or SQL-queries lead to involved solutions whose maintenance and validation is difficult and binds considerable amount of resources. +In this paper we present the project in which we have applied Semantic Technologies to compute the most difficult part of the HEDIS measures. 
We show that we arrive at a clean, structured and legible encoding of HEDIS in the rule language of the RDF-triple store RDFox. We use RDFox's reasoning capabilities and SPARQL queries to compute and extract the results. The results of a whole Kaiser Permanente regional branch could be computed in competitive time by RDFox on readily available commodity hardware. Further development and deployment of the project results are envisaged in Kaiser Permanente. + SWRL + + + + RDFox + + Forward Chaining + Semantic Technologies for Data Analysis in Health Care + + + Data Analysis + Materialisation + Materialisation + Datalog + RDF + RDFox + + Forward Chaining + Semantic Technologies for Data Analysis in Health Care + + + + SWRL + Triple store + Semantic Technologies + + RDF + Triple store + + + + Data Analysis + Datalog + + + + + + One focus of Semantic Technologies are formalisms that allow to express complex properties of and relationships between classes of data. The declarative nature of these formalisms is close to natural language and human conceptualisation and thus Semantic Technologies enjoy increasing popularity in scenarios where traditional solutions lead to very convoluted procedures which are difficult to maintain and whose correctness is difficult to judge. +A fruitful application of Semantic Technologies in the field of health care data analysis has emerged from the collaboration between Oxford and Kaiser Permanente a US health care provider (HMO). US HMOs have to annually deliver measurement results on their quality of care to US authorities. One of these sets of measurements is defined in a specification called HEDIS which is infamous amongst data analysts for its complexity. Traditional solutions with either SAS-programs or SQL-queries lead to involved solutions whose maintenance and validation is difficult and binds considerable amount of resources. 
+In this paper we present the project in which we have applied Semantic Technologies to compute the most difficult part of the HEDIS measures. We show that we arrive at a clean, structured and legible encoding of HEDIS in the rule language of the RDF-triple store RDFox. We use RDFox's reasoning capabilities and SPARQL queries to compute and extract the results. The results of a whole Kaiser Permanente regional branch could be computed in competitive time by RDFox on readily available commodity hardware. Further development and deployment of the project results are envisaged in Kaiser Permanente. + Semantic Technologies + Semantic Technologies for Data Analysis in Health Care + + + + + + + + + + + + + Daniel Garijo + + + + + + Daniel Garijo + + Daniel Garijo + + + + In recent years, there has been an increasing efforts to develop techniques for related entity recommendation, where the task is to retrieve a ranked list of related entities given a keyword query. Another trend in the area of information retrieval (IR) is to take temporal aspects of a given query into account when assessing the relevance of documents. However, while this has become an established functionality in document search engines, the significance of time, especially when explicitly given, has not been recognized for entity recommendation, yet. We address this gap by introducing the task of time-aware entity recommendation. We propose the first probabilistic model that takes time-awareness into consideration for entity recommendation by leveraging heterogeneous knowledge of entities extracted from different data sources publicly available on the Web. We extensively evaluate the proposed approach and our experimental results show considerable improvements compare to time-agnostic entity recommendation approaches. 
+ + + A Probabilistic Model for Time-Aware Entity Recommendation + + + + Probabilistic Model + Probabilistic Model + + Time-awareness + + + + A Probabilistic Model for Time-Aware Entity Recommendation + + + Time-awareness + Entity Recommendation + + + Entity Recommendation + + A Probabilistic Model for Time-Aware Entity Recommendation + In recent years, there has been an increasing efforts to develop techniques for related entity recommendation, where the task is to retrieve a ranked list of related entities given a keyword query. Another trend in the area of information retrieval (IR) is to take temporal aspects of a given query into account when assessing the relevance of documents. However, while this has become an established functionality in document search engines, the significance of time, especially when explicitly given, has not been recognized for entity recommendation, yet. We address this gap by introducing the task of time-aware entity recommendation. We propose the first probabilistic model that takes time-awareness into consideration for entity recommendation by leveraging heterogeneous knowledge of entities extracted from different data sources publicly available on the Web. We extensively evaluate the proposed approach and our experimental results show considerable improvements compare to time-agnostic entity recommendation approaches. + + + knowledge resource + knowledge base + + + + There are many studies on question answering system which can answer to natural language questions. Diverse techniques are required for building this system, but it cannot be implemented without well-structured knowledge data. For this reason, we construct a large-scale knowledge base in Korean, with the goal of creating a uniquely Korean question answering system. 
+ + + question answering system + + XB: A Large-scale Korean Knowledge Base for Question Answering Systems + + + XB: A Large-scale Korean Knowledge Base for Question Answering Systems + ontology + question answering system + ontology + knowledge resource + + + + + + knowledge base + + + + + There are many studies on question answering system which can answer to natural language questions. Diverse techniques are required for building this system, but it cannot be implemented without well-structured knowledge data. For this reason, we construct a large-scale knowledge base in Korean, with the goal of creating a uniquely Korean question answering system. + XB: A Large-scale Korean Knowledge Base for Question Answering Systems + + + Lancaster University + + + + Lancaster University + + + Lancaster University + + + + + + + + + + + + + + + + + + + + + + + + + + This paper describes an extension of the TableMiner+ sys- tem, the only open source Semantic Table Interpretation system that annotates Web tables using Linked Data in an effective and efficient ap- proach. It adds a graphical user interface to TableMiner+, to facilitate the visualization and correction of automatically generated annotations. This makes TableMiner+ an ideal tool for the semi-automatic creation of high-quality semantic annotations on tabular data, which facilitates the publication of Linked Data on the Web. + + Visualizing Semantic Table Annotations with TableMiner+ + + + + Visualizing Semantic Table Annotations with TableMiner+ + + Semantic Table Interpretation + Named Entity Disambiguation + + This paper describes an extension of the TableMiner+ sys- tem, the only open source Semantic Table Interpretation system that annotates Web tables using Linked Data in an effective and efficient ap- proach. It adds a graphical user interface to TableMiner+, to facilitate the visualization and correction of automatically generated annotations. 
This makes TableMiner+ an ideal tool for the semi-automatic creation of high-quality semantic annotations on tabular data, which facilitates the publication of Linked Data on the Web. + + Linked Data + Visualizing Semantic Table Annotations with TableMiner+ + + Semantic Table Interpretation + + Web table + Named Entity Disambiguation + + Linked Data + + Web table + + table annotation + table annotation + + + + + University of Birmingham + University of Birmingham + + + University of Birmingham + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T15:30:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + Embeddings & Neural Approaches + 2016-10-20T16:50:00 + 2016-10-20T15:30:00 + Embeddings & Neural Approaches + + + + + + + + + + + + + + + José Luis Ambite + + José Luis Ambite + + + + + + 1e430f2ad8c3dd42da0ac00ed2c6a7c3e6fcaeb5 + José Luis Ambite + + + + + + + + + + + + Joachim Van Herwegen + + + + Joachim Van Herwegen + + + + 485c05c89867f48a36372dbec0ff72e2768ce2df + Joachim Van Herwegen + + + + + + + + + + + + + + + + + + + + + + + + + Victorio Albani Carvalho + + Victorio Albani Carvalho + + + fae2979c50e663691b141cad1d70fdf9a83fe68e + Victorio Albani Carvalho + + + + + + + + + Yao Meng + + + + f174c419d83571a98647083026223b0bd3a0b2e3 + + Yao Meng + + + + Yao Meng + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Dhavalkumar Thakker + + + b046ea56f6da0922f220c34b48f164c77866ede0 + Dhavalkumar Thakker + + + Dhavalkumar Thakker + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Expressive Multi-Level Modeling for the Semantic Web + + + Expressive Multi-Level Modeling for the Semantic Web + + + + + + + + multi-level modeling + semantic web + In several subject domains, classes themselves may be subject to categorization, resulting in classes of classes (or “metaclasses”). 
When representing these do-mains, one needs to capture not only entities of different classification levels, but also their (intricate) relations. We observe that this is challenging in current Se-mantic Web languages as there is little support to guide the modeler in producing correct multi-level ontologies, especially because of the nuances in the constraints that apply to entities of different classification levels and their relations. In order to address these representation challenges, we propose a vocabulary that can be used as a basis for multi-level ontologies in OWL along with a number of integri-ty constraints to prevent the construction of inconsistent models. In this process we employ an axiomatic theory called MLT (a Multi-Level Modeling Theory). + metamodeling + + OWL + multi-level modeling + + metamodeling + OWL + In several subject domains, classes themselves may be subject to categorization, resulting in classes of classes (or “metaclasses”). When representing these do-mains, one needs to capture not only entities of different classification levels, but also their (intricate) relations. We observe that this is challenging in current Se-mantic Web languages as there is little support to guide the modeler in producing correct multi-level ontologies, especially because of the nuances in the constraints that apply to entities of different classification levels and their relations. In order to address these representation challenges, we propose a vocabulary that can be used as a basis for multi-level ontologies in OWL along with a number of integri-ty constraints to prevent the construction of inconsistent models. In this process we employ an axiomatic theory called MLT (a Multi-Level Modeling Theory). 
+ Expressive Multi-Level Modeling for the Semantic Web + + + + + + semantic web + + + + + + + + + + + + + + + + + + + + + + + Linked Data Fragments + Querying Dynamic Datasources with Continuously Mapped Sensor Data + SPARQL + RML + + + SPARQL + + + dynamic data + + + + The world contains a large amount of sensors that produce new data at a high frequency. It is currently very hard to find public services that expose these measurements as dynamic Linked Data, We investigate how sensor data can be published continuously on the Web at a low cost. This paper describes how the publication of various sensor data sources can be done by continuously mapping raw sensor data to RDF and inserting it into a live, low-cost server. This makes it possible for clients to continuously evaluate dynamic queries using public sensor data. For our demonstration, we will illustrate how this pipeline works for the publication of temperature and humidity data originating from a microcontroller, and how it can be queried. + dynamic data + The world contains a large amount of sensors that produce new data at a high frequency. It is currently very hard to find public services that expose these measurements as dynamic Linked Data, We investigate how sensor data can be published continuously on the Web at a low cost. This paper describes how the publication of various sensor data sources can be done by continuously mapping raw sensor data to RDF and inserting it into a live, low-cost server. This makes it possible for clients to continuously evaluate dynamic queries using public sensor data. For our demonstration, we will illustrate how this pipeline works for the publication of temperature and humidity data originating from a microcontroller, and how it can be queried. 
+ + Linked Data + + + Querying Dynamic Datasources with Continuously Mapped Sensor Data + Linked Data Fragments + + + Linked Data + + + + RML + + Querying Dynamic Datasources with Continuously Mapped Sensor Data + + + + + + + + + 828ae3c3ff393840381e683deef160054f888d2a + + + + Thi-Nhu Nguyen + + + + Thi-Nhu Nguyen + Thi-Nhu Nguyen + + + + + + 2016-10-21T15:30:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + Reasoning + 2016-10-21T15:30:00 + Reasoning + + + + + + + + ce91cb3f688cea4a277c9dfc9f0f0a7f604bfec3 + Lei Zhang + + + + + Lei Zhang + + + Lei Zhang + + + + + + + + + + + + + + + + + + + + + Data Integration for the Media Value Chain + + tv and film production + Data Integration for the Media Value Chain + semantic metadata + + + + + + + + Data Integration for the Media Value Chain + tv and film production + linked data integration + semantic metadata + + + + + +With the switch from analog to digital technology the entire process of production, distribution, and archival of a film and tv program large amounts of data are created. Besides recorded and processed audiovisual information, in each single step of the production process and furthermore throughout the entire media value chain new metadata is created, administrated, and put into relation with already existing metadata mandatory for the management of these processes. Due to competing standards as well as to proprietary and incompatible interfaces of the applied software tools, a significant amount of this metadata is lost again and not available for subsequent steps in the process chain. As a consequence most of this valuable information has to be costly recreated in each single step of media production, distribution, and archival. Currently, there is no generally accepted nor commonly used metadata exchange format that is applied throughout the media value chain. 
But, also the market for media production companies has changed dramatically towards the internet as being the preferred distribution channel for all media content. Today’s available limited budget for media production companies puts additional pressure to work in a cost and time efficient way and not to waste resources due to the necessity of costly reengineering of lost metadata. The dwerft project aims to apply Linked Data principles for all metadata exchange through all steps of the media value chain. Starting with the very first idea for a script, all metadata are mapped to either existing or newly developed ontologies to be reused in subsequent steps of the media value chain. Thus, metadata collected during the media production becomes a valuable asset not only for each step from pre- to postproduction, but also in distribution and archival. +This paper presents results of the dwerft project about the successful integration of a set of film production tools based on the Linked Production Data Cloud, a technology platform for the film and tv industry to enable software interoperability used in production, distribution, and archival of audiovisual content. + linked data integration + + + +With the switch from analog to digital technology the entire process of production, distribution, and archival of a film and tv program large amounts of data are created. Besides recorded and processed audiovisual information, in each single step of the production process and furthermore throughout the entire media value chain new metadata is created, administrated, and put into relation with already existing metadata mandatory for the management of these processes. Due to competing standards as well as to proprietary and incompatible interfaces of the applied software tools, a significant amount of this metadata is lost again and not available for subsequent steps in the process chain. 
As a consequence most of this valuable information has to be costly recreated in each single step of media production, distribution, and archival. Currently, there is no generally accepted nor commonly used metadata exchange format that is applied throughout the media value chain. But, also the market for media production companies has changed dramatically towards the internet as being the preferred distribution channel for all media content. Today’s available limited budget for media production companies puts additional pressure to work in a cost and time efficient way and not to waste resources due to the necessity of costly reengineering of lost metadata. The dwerft project aims to apply Linked Data principles for all metadata exchange through all steps of the media value chain. Starting with the very first idea for a script, all metadata are mapped to either existing or newly developed ontologies to be reused in subsequent steps of the media value chain. Thus, metadata collected during the media production becomes a valuable asset not only for each step from pre- to postproduction, but also in distribution and archival. +This paper presents results of the dwerft project about the successful integration of a set of film production tools based on the Linked Production Data Cloud, a technology platform for the film and tv industry to enable software interoperability used in production, distribution, and archival of audiovisual content. + + + + + ontologies for manufacturing + + ontology editor + + ontologies for manufacturing + + + + + + + + + + + + + + + + ontology editor + + + + + Capturing Industrial Information Models with Ontologies and Constraints + Capturing Industrial Information Models with Ontologies and Constraints + This paper describes the outcomes of an ongoing collaboration between Siemens and the University of Oxford, with the goal of facilitating the design of ontologies and their deployment in applications. 
Ontologies are mainly used in Siemens to capture the conceptual information models underpinning a wide range of applications. We start by describing the key role that such models play in two use cases in the manufacturing and energy production sectors. Then, we discuss the formalisation of information models using ontologies, and the relevant reasoning services. Finally, we present SOMM---a tool that supports engineers with little background on semantic technologies in the creation of ontology-based models and in populating them with data. SOMM implements a fragment of OWL 2 RL extended with a form of integrity constraints for data validation, and it comes with support for schema and data reasoning, as well as for model integration. Our evaluation demonstrates the adequacy of SOMM's functionality and performance for Siemens applications. + This paper describes the outcomes of an ongoing collaboration between Siemens and the University of Oxford, with the goal of facilitating the design of ontologies and their deployment in applications. Ontologies are mainly used in Siemens to capture the conceptual information models underpinning a wide range of applications. We start by describing the key role that such models play in two use cases in the manufacturing and energy production sectors. Then, we discuss the formalisation of information models using ontologies, and the relevant reasoning services. Finally, we present SOMM---a tool that supports engineers with little background on semantic technologies in the creation of ontology-based models and in populating them with data. SOMM implements a fragment of OWL 2 RL extended with a form of integrity constraints for data validation, and it comes with support for schema and data reasoning, as well as for model integration. Our evaluation demonstrates the adequacy of SOMM's functionality and performance for Siemens applications. 
+ + + ontologies + Capturing Industrial Information Models with Ontologies and Constraints + + + + ontologies + + + + + + + + + + + + + + + + + + + + + + + RDF + + + + + + Distributed RDF Query Answering with Dynamic Data Exchange + distributed query answering + Evaluating joins over RDF data stored in a shared-nothing server cluster is key +to processing truly large RDF datasets. To the best of our knowledge, the +existing approaches use a variant of the data exchange operator that is +inserted into the query plan statically (i.e., at query compile time) to +shuffle data between servers. We argue that this often misses opportunities for +local computation, and we present a novel solution to distributed query +answering that consists of two main components. First, we present a query +answering algorithm based on dynamic data exchange, which exploits data +locality better than the static approaches. Second, we present a partitioning +algorithm for RDF data based on graph partitioning whose aim is to increase +data locality. We have implemented our approach in the RDFox system, and our +performance evaluation suggests that our techniques outperform the state of the +art by up to an order of magnitude. + data partitioning + SPARQL + + data partitioning + + + Distributed RDF Query Answering with Dynamic Data Exchange + SPARQL + + + + + RDF + + + Distributed RDF Query Answering with Dynamic Data Exchange + + + distributed query answering + + + + Evaluating joins over RDF data stored in a shared-nothing server cluster is key +to processing truly large RDF datasets. To the best of our knowledge, the +existing approaches use a variant of the data exchange operator that is +inserted into the query plan statically (i.e., at query compile time) to +shuffle data between servers. We argue that this often misses opportunities for +local computation, and we present a novel solution to distributed query +answering that consists of two main components. 
First, we present a query +answering algorithm based on dynamic data exchange, which exploits data +locality better than the static approaches. Second, we present a partitioning +algorithm for RDF data based on graph partitioning whose aim is to increase +data locality. We have implemented our approach in the RDFox system, and our +performance evaluation suggests that our techniques outperform the state of the +art by up to an order of magnitude. + + + Diego Esteves + bf544080194f9f13e5c3116db1cb169cda21b76f + + + + + + + Diego Esteves + + + Diego Esteves + + + + + + + + + + + Gregoire Burel + + + Gregoire Burel + + + + 0b33c9bd854b746d01796c263e976d664bcdbda9 + Gregoire Burel + + + + + + + + + + + + + + + + + + + + + + + + + + + + Faceted search over RDF-based knowledge graphs + + + RDF + Algorithms + + + Ontology + + + + RDF + Faceted search + + + SPARQL + + + Knowledge graphs such as Yago and Freebase have become a powerful asset for enhancing search, and are being intensively used in both academia and industry. Many existing knowledge graphs are either available as Linked Open Data, or they can be exported as RDF datasets enhanced with background knowledge in the form of an OWL 2 ontology. Faceted search is the de facto approach for exploratory search in many online applications, and has been recently proposed as a suitable paradigm for querying RDF repositories. In this paper, we provide rigorous theoretical underpinnings for faceted search in the context of RDF-based knowledge graphs enhanced with OWL 2 ontologies. We identify well-defined fragments of SPARQL that can be naturally captured using faceted search as a query paradigm, and establish the computational complexity of answering such queries. We also study the problem of updating faceted interfaces, which is critical for guiding users in the formulation of meaningful queries during exploratory search. 
We have implemented our approach in a fully-fledged faceted search system, SemFacet, which we have evaluated over the Yago knowledge graph. + Faceted search over RDF-based knowledge graphs + Ontology + OWL 2 + Faceted search over RDF-based knowledge graphs + + + + SPARQL + Faceted search + Knowledge graphs such as Yago and Freebase have become a powerful asset for enhancing search, and are being intensively used in both academia and industry. Many existing knowledge graphs are either available as Linked Open Data, or they can be exported as RDF datasets enhanced with background knowledge in the form of an OWL 2 ontology. Faceted search is the de facto approach for exploratory search in many online applications, and has been recently proposed as a suitable paradigm for querying RDF repositories. In this paper, we provide rigorous theoretical underpinnings for faceted search in the context of RDF-based knowledge graphs enhanced with OWL 2 ontologies. We identify well-defined fragments of SPARQL that can be naturally captured using faceted search as a query paradigm, and establish the computational complexity of answering such queries. We also study the problem of updating faceted interfaces, which is critical for guiding users in the formulation of meaningful queries during exploratory search. We have implemented our approach in a fully-fledged faceted search system, SemFacet, which we have evaluated over the Yago knowledge graph. 
+ + OWL 2 + + + Algorithms + + + + + + + + + Jan Wielemaker + b08951b8e7e528c076d4a62b8ba542ae4ac69789 + + + + Jan Wielemaker + + Jan Wielemaker + + + + + + + + + + + + + + + + + + + + 04ff3b804243a76261a8fc27f6d0033321e83ecc + + + + + + Bahaa Eldesouky + + + Bahaa Eldesouky + Bahaa Eldesouky + + + + + + Urban LOD + Building Urban LOD for Solving Illegally Parked Bicycles in Tokyo + Urban Problem + + + Illegally Parked Bicycles + + Open Urban Data + + + + Urban Problem + + + + Open Urban Data + Building Urban LOD for Solving Illegally Parked Bicycles in Tokyo + + Building Urban LOD for Solving Illegally Parked Bicycles in Tokyo + + Illegally Parked Bicycles + Urban LOD + + The illegal parking of bicycles is a social problem in Tokyo and other urban areas. The purpose of this study was to sustainably build Linked Open Data (LOD) for the illegally parked bicycles and to support the problem solving by raising social awareness, in cooperation with the Bureau of General Affairs of Tokyo. We first extracted information on the problem factors and designed LOD schema for illegally parked bicycles. Then we collected pieces of data from Social Networking Service (SNS) and websites of municipalities to build the illegally parked bicycle LOD (IPBLOD) with more than 200,000 triples. We then estimated the missing data in the LOD based on the causal relations from the problem factors. As a result, the number of illegally parked bicycles can be inferred with 70.9% accuracy. Finally, we published the complemented LOD and a Web application to visualize the distribution of illegally parked bicycles in the city. We hope this raises social attention on this issue. + + + The illegal parking of bicycles is a social problem in Tokyo and other urban areas. The purpose of this study was to sustainably build Linked Open Data (LOD) for the illegally parked bicycles and to support the problem solving by raising social awareness, in cooperation with the Bureau of General Affairs of Tokyo. 
We first extracted information on the problem factors and designed LOD schema for illegally parked bicycles. Then we collected pieces of data from Social Networking Service (SNS) and websites of municipalities to build the illegally parked bicycle LOD (IPBLOD) with more than 200,000 triples. We then estimated the missing data in the LOD based on the causal relations from the problem factors. As a result, the number of illegally parked bicycles can be inferred with 70.9% accuracy. Finally, we published the complemented LOD and a Web application to visualize the distribution of illegally parked bicycles in the city. We hope this raises social attention on this issue. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Yohei Yamane + + + + + + Yohei Yamane + a252745c5866421122de6b67e10caffc0a31658f + + + + Yohei Yamane + + + + + + + Fact validation + + DeFacto - Temporal and multilingual Deep Fact Validation + DeFacto - Temporal and multilingual Deep Fact Validation + + Web of Data + + + + + + + + + + Provenance + NLP + + + NLP + + + DeFacto - Temporal and multilingual Deep Fact Validation + + + One of the main tasks when creating and maintaining knowledge bases is to validate facts and provide sources for them in order to ensure correctness and traceability of the provided knowledge. So far, this task is often addressed by human curators in a three-step process: issuing appropriate keyword queries for the statement to check using standard search engines, retrieving potentially relevant documents and screening those documents for relevant content. The drawbacks of this process are manifold. Most importantly, it is very time-consuming as the experts have to carry out several search processes and must often read several documents. In this article, we present DeFacto (Deep Fact Validation)—an algorithm able to validate facts by finding trustworthy sources for them on the Web. 
DeFacto aims to provide an effective way of validating facts by supplying the user with relevant excerpts of web pages as well as useful additional information including a score for the confidence DeFacto has in the correctness of the input fact. To achieve this goal, DeFacto collects and combines evidence from web pages written in several languages. In addition, DeFacto provides support for facts with a temporal scope, i.e., it can estimate in which time frame a fact was valid. Given that the automatic evaluation of facts has not been paid much attention to so far, generic benchmarks for evaluating these frameworks were not previously available. We thus also present a generic evaluation framework for fact checking and make it publicly available. + One of the main tasks when creating and maintaining knowledge bases is to validate facts and provide sources for them in order to ensure correctness and traceability of the provided knowledge. So far, this task is often addressed by human curators in a three-step process: issuing appropriate keyword queries for the statement to check using standard search engines, retrieving potentially relevant documents and screening those documents for relevant content. The drawbacks of this process are manifold. Most importantly, it is very time-consuming as the experts have to carry out several search processes and must often read several documents. In this article, we present DeFacto (Deep Fact Validation)—an algorithm able to validate facts by finding trustworthy sources for them on the Web. DeFacto aims to provide an effective way of validating facts by supplying the user with relevant excerpts of web pages as well as useful additional information including a score for the confidence DeFacto has in the correctness of the input fact. To achieve this goal, DeFacto collects and combines evidence from web pages written in several languages. 
In addition, DeFacto provides support for facts with a temporal scope, i.e., it can estimate in which time frame a fact was valid. Given that the automatic evaluation of facts has not been paid much attention to so far, generic benchmarks for evaluating these frameworks were not previously available. We thus also present a generic evaluation framework for fact checking and make it publicly available. + Fact validation + + + Web of Data + + Provenance + + + + + + + + + + + + + + + + + + + + + + + + + Rules with exceptions + Rules with exceptions + + + Exception-enriched Rule Learning from Knowledge Graphs + Advances in information extraction have enabled the automatic construction of large knowledge graphs (KGs) like DBpedia, Freebase, Yago and Wikidata. These KGs are inevitably bound to be incomplete. To fill in the gaps, data correlations in the KG can be analyzed to infer Horn rules and to predict new facts. However, Horn rules do not take into account possible exceptions, so that predicting facts via such rules introduces errors. +To overcome this problem, we present a method for effective revision of learned Horn rules by effectively incorporating exceptions (i.e., negated atoms) into their bodies. This way errors are largely reduced. We apply our method to discover rules with exceptions from real-world KGs. Our experimental results demonstrate the effectiveness of the developed method and the improvements in accuracy for KG completion by rule-based fact prediction. + + + + Knowledge graphs + + Knowledge base completion + + + Exception-enriched Rule Learning from Knowledge Graphs + + + Rule mining + + Knowledge graphs + Exception-enriched Rule Learning from Knowledge Graphs + + + Advances in information extraction have enabled the automatic construction of large knowledge graphs (KGs) like DBpedia, Freebase, Yago and Wikidata. These KGs are inevitably bound to be incomplete. 
To fill in the gaps, data correlations in the KG can be analyzed to infer Horn rules and to predict new facts. However, Horn rules do not take into account possible exceptions, so that predicting facts via such rules introduces errors. +To overcome this problem, we present a method for effective revision of learned Horn rules by effectively incorporating exceptions (i.e., negated atoms) into their bodies. This way errors are largely reduced. We apply our method to discover rules with exceptions from real-world KGs. Our experimental results demonstrate the effectiveness of the developed method and the improvements in accuracy for KG completion by rule-based fact prediction. + Knowledge base completion + Rule mining + + + + + + + + + Khaled Rasheed + + + Khaled Rasheed + + + + 0dcf9fe7adc3bfea3c0dade5dc57da19e188ffd4 + + + + Khaled Rasheed + + + + + + + + + + + + + + Giuseppe Rizzo + + + Giuseppe Rizzo + + + d6db5bfd6b52117e6f1162c66351046a1b87c8f4 + + Giuseppe Rizzo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Soheila Dehghanzadeh + Soheila Dehghanzadeh + e510ea1c0a6ba7bc006ec1c4c5e2d07c9fb725cc + + + Soheila Dehghanzadeh + + + + + + + + + + + + + + Armen Inants + + + + + + Armen Inants + + + + 0bc71d13b13b2b87aa2d33f3857a17fd5f9fe8b6 + + Armen Inants + + + + + + + + + + + German Research Center for Artificial Intelligence + + German Research Center for Artificial Intelligence + + + + + + German Research Center for Artificial Intelligence + + + + + + + + + British Library + + + British Library + British Library + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ermei Cao + + a26a2d07a4430148de3b8b4c26c2bac7362544dd + + Ermei Cao + + + + Ermei Cao + + + + + + + + + + + Ontologies + + + FOOD: FOod in Open Data + + Linked Open Data + + + + + + + FOOD: FOod in Open Data + This paper describes the outcome of an e-government project named FOOD, FOod in Open Data, which was 
carried out in the context of a collaboration between the Institute of Cognitive Sciences and Technologies of the Italian National Research Council, the Italian Ministry of Agriculture (MIPAAF) and the Italian Digital Agency (AgID). In particular, we implemented several ontologies for describing protected names of products (wine, pasta, fish, oil, etc.). In addition, we present the process carried out for producing and publishing a LOD dataset containing data extracted from existing Italian policy documents on such products and compliant with the aforementioned ontologies. + Ontologies + + This paper describes the outcome of an e-government project named FOOD, FOod in Open Data, which was carried out in the context of a collaboration between the Institute of Cognitive Sciences and Technologies of the Italian National Research Council, the Italian Ministry of Agriculture (MIPAAF) and the Italian Digital Agency (AgID). In particular, we implemented several ontologies for describing protected names of products (wine, pasta, fish, oil, etc.). In addition, we present the process carried out for producing and publishing a LOD dataset containing data extracted from existing Italian policy documents on such products and compliant with the aforementioned ontologies. + Linked Open Data + + FOOD: FOod in Open Data + + Ontology Design Patterns + + + + + + + + Ontology Design Patterns + + + + + + Axel Polleres + + + + + + + Axel Polleres + be73142926111b35aed1f1001bc62a1edb3ea1d1 + + + + Axel Polleres + + + + + + Hiroshi Umemoto + + + + + Hiroshi Umemoto + + + 8280495beca572e92aed89d57c1f99dd9e45d066 + Hiroshi Umemoto + + + + + The rapid growth of the Linked Open Data cloud, as well as the increasing ability to lift relational enterprise datasets to a semantic, ontology-based level means that vast amounts of information are now available in a representation that closely matches the conceptualizations of the potential users of this information. 
This makes it interesting to create ontology based, user-oriented tools for searching and exploring this data. Although initial efforts were intended for tech users with knowledge of SPARQL/RDF, there are ongoing proposals designed for lay users. One of the most promising approaches is to use visual query interfaces, but more user studies are needed to assess their effectiveness. In this paper, we compare the effect on usability of two important paradigms for ontology-based query interfaces: form-based and graph-based interfaces. In order to reduce the number of variables affecting the comparison, we performed a user study with two state-of-the-art query tools developed by ourselves, sharing a large part of the code base: the graph-based tool OptiqueVQS*, and the form-based tool PepeSearch. We evaluated these tools in a formal comparison study with 15 participants searching a Linked Open Data version of the Norwegian Company Registry. Participants had to respond to 6 non-trivial search tasks using alternately OptiqueVQS* and PepeSearch. Even without previous training, retrieval performance and user confidence were very high, thus suggesting that both interface designs are effective for searching RDF datasets. Expert searchers had a clear preference for the graph-based interface, and mainstream searchers obtained better performance and confidence with the form-based interface. While a number of participants spontaneously praised the capability of the graph interface for composing complex queries, our results evidence that graph interfaces are difficult to grasp. In contrast, form interfaces are more learnable and relieve problems with disorientation for mainstream users. We have also observed positive results introducing faceted search and dynamic term suggestion in semantic search interfaces. 
+ + + User studies + + + + + + + + + + + + + + + Visual query interfaces for semantic datasets: an evaluation study + + Visual query interfaces + Visual query interfaces for semantic datasets: an evaluation study + Usability + Semantic search + Usability + + + User studies + + Visual query interfaces + + Semantic search + The rapid growth of the Linked Open Data cloud, as well as the increasing ability to lift relational enterprise datasets to a semantic, ontology-based level means that vast amounts of information are now available in a representation that closely matches the conceptualizations of the potential users of this information. This makes it interesting to create ontology based, user-oriented tools for searching and exploring this data. Although initial efforts were intended for tech users with knowledge of SPARQL/RDF, there are ongoing proposals designed for lay users. One of the most promising approaches is to use visual query interfaces, but more user studies are needed to assess their effectiveness. In this paper, we compare the effect on usability of two important paradigms for ontology-based query interfaces: form-based and graph-based interfaces. In order to reduce the number of variables affecting the comparison, we performed a user study with two state-of-the-art query tools developed by ourselves, sharing a large part of the code base: the graph-based tool OptiqueVQS*, and the form-based tool PepeSearch. We evaluated these tools in a formal comparison study with 15 participants searching a Linked Open Data version of the Norwegian Company Registry. Participants had to respond to 6 non-trivial search tasks using alternately OptiqueVQS* and PepeSearch. Even without previous training, retrieval performance and user confidence were very high, thus suggesting that both interface designs are effective for searching RDF datasets. 
Expert searchers had a clear preference for the graph-based interface, and mainstream searchers obtained better performance and confidence with the form-based interface. While a number of participants spontaneously praised the capability of the graph interface for composing complex queries, our results evidence that graph interfaces are difficult to grasp. In contrast, form interfaces are more learnable and relieve problems with disorientation for mainstream users. We have also observed positive results introducing faceted search and dynamic term suggestion in semantic search interfaces. + Visual query interfaces for semantic datasets: an evaluation study + + + + + + + + Yu Sugawara + + + Yu Sugawara + + + 429c9c0ab6250d527315b88a1d93ab3bd82cef6f + + Yu Sugawara + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Structuring Linked Data Search Results Using Probabilistic Soft Logic + + + + + + + linked data integration + + probabilistic soft logic + linked data integration + On-the-fly generation of integrated representations of Linked Data (LD) + search results is challenging because it requires successfully automating a number + of complex subtasks, such as structure inference and matching of + both instances and concepts, each of which gives rise to uncertain + outcomes. Such uncertainty is unavoidable given the semantically heterogeneous + nature of web sources, including LD ones. This paper approaches the problem of + structuring LD search results as an evidence-based one. In particular, the paper shows + how one formalism (viz., probabilistic soft logic (PSL)) can be exploited to assimilate + different sources of evidence in a principled way and to beneficial + effect for users. 
The paper considers syntactic evidence derived from matching + algorithms, semantic evidence derived from LD vocabularies, and user + evidence, in the form of feedback. The main contributions are: sets + of PSL rules that model the uniform assimilation of diverse kinds of evidence, + an empirical evaluation of how the resulting PSL programs perform in terms + of their ability to infer structure in LD search results, and, finally, a concrete + example of how populating such inferred structures for presentation + to the end user is beneficial, besides enabling the collection of + feedback whose assimilation further improves search result presentation. + + + + linked data search + + probabilistic soft logic + On-the-fly generation of integrated representations of Linked Data (LD) + search results is challenging because it requires successfully automating a number + of complex subtasks, such as structure inference and matching of + both instances and concepts, each of which gives rise to uncertain + outcomes. Such uncertainty is unavoidable given the semantically heterogeneous + nature of web sources, including LD ones. This paper approaches the problem of + structuring LD search results as an evidence-based one. In particular, the paper shows + how one formalism (viz., probabilistic soft logic (PSL)) can be exploited to assimilate + different sources of evidence in a principled way and to beneficial + effect for users. The paper considers syntactic evidence derived from matching + algorithms, semantic evidence derived from LD vocabularies, and user + evidence, in the form of feedback. 
The main contributions are: sets + of PSL rules that model the uniform assimilation of diverse kinds of evidence, + an empirical evaluation of how the resulting PSL programs perform in terms + of their ability to infer structure in LD search results, and, finally, a concrete + example of how populating such inferred structures for presentation + to the end user is beneficial, besides enabling the collection of + feedback whose assimilation further improves search result presentation. + + + + + Structuring Linked Data Search Results Using Probabilistic Soft Logic + Structuring Linked Data Search Results Using Probabilistic Soft Logic + linked data search + + + + + + + + + + + + + + + + + + + + + Konrad Höffner + Konrad Höffner + + Konrad Höffner + + + + + + 4ffd792932126fe6ab4d187c84c65c0f9c665057 + + + + user experience + semantic data + + search + + + + + relational data + + Benchmarking End-User Structured Data Search and Exploration + + semantic data + + + + exploration + + + exploration + + usability + The Semantic Web Community has invested significant research effort in developing systems for Semantic Web search and exploration. But while it has been easy to assess the systems' computational efficiency, it has been much harder to assess how well different semantic systems help their users find and browse information. In this article, we propose and demonstrate the use of a benchmark for evaluating them, similar to the TREC benchmark for evaluating traditional search engines. Our benchmark includes a set of typical user tasks and a well-defined procedure for assigning a measure of performance on those tasks to a semantic system. We demonstrate its application to one such system, Rhizomer. We intend for this work to initiate a community conversation that will lead to a general accepted framework for comparing systems and measuring, and thus encouraging, progress towards better semantic search and exploration tools. 
+ Benchmarking End-User Structured Data Search and Exploration + relational data + + Benchmarking End-User Structured Data Search and Exploration + user experience + + + The Semantic Web Community has invested significant research effort in developing systems for Semantic Web search and exploration. But while it has been easy to assess the systems' computational efficiency, it has been much harder to assess how well different semantic systems help their users find and browse information. In this article, we propose and demonstrate the use of a benchmark for evaluating them, similar to the TREC benchmark for evaluating traditional search engines. Our benchmark includes a set of typical user tasks and a well-defined procedure for assigning a measure of performance on those tasks to a semantic system. We demonstrate its application to one such system, Rhizomer. We intend for this work to initiate a community conversation that will lead to a general accepted framework for comparing systems and measuring, and thus encouraging, progress towards better semantic search and exploration tools. + + + + benchmark + + usability + + benchmark + search + + + + + + + + + + + + + + + Auckland University of Technology + + + Auckland University of Technology + + + + + Auckland University of Technology + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T11:30:00 + 2016-10-20T11:10:00 + + + 2016-10-20T11:10:00 + Daniel Hernandez, Aidan Hogan, Cristian Riveros, Carlos Rojas and Enzo Zerega + 2016-10-20T11:30:00 + Querying Wikidata: Comparing SPARQL, Relational and Graph Databases + + 2016-10-20T11:30:00 + 2016-10-20T11:30:00 + Querying Wikidata: Comparing SPARQL, Relational and Graph Databases + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + e409b5eceff3b8cf4be69005301c6984fa6ceae3 + + + Jeff Z. Pan + + + Jeff Z. Pan + Jeff Z. 
Pan + + + + + + + + + + + + + + Corentin Jouault + + + + + 6aa1c38a4568fa3bc90618e86135e2b8e37a34ed + + + + Corentin Jouault + + + Corentin Jouault + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kleanthi Georgala + + Kleanthi Georgala + + + d160fc13181daaa28c946ed7699a99d5a06201cd + + Kleanthi Georgala + + + + + + + + + + + + + + + + + + + + + + + + + + + Davide Lanti + + + + + de114ecaecd68c6eba2c09ba149487b21fbb31dd + + + Davide Lanti + + Davide Lanti + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ontology + + + Working process quantification in factory using wearable sensor device and ontology-based stream data processing + Ontology + + + + A method for quantifying working processes on manufacturing floors was established that uses a wearable sensor device and an ontology-based stream data processing system. +Using this method, the measurement of manufacturing process efficiency from sensor data extracted from such a device worn by workers on the job was confirmed at the Fuji Xerox factory. + + + Wearable device + + A method for quantifying working processes on manufacturing floors was established that uses a wearable sensor device and an ontology-based stream data processing system. +Using this method, the measurement of manufacturing process efficiency from sensor data extracted from such a device worn by workers on the job was confirmed at the Fuji Xerox factory. 
+ + Working process quantification in factory using wearable sensor device and ontology-based stream data processing + Wearable device + IOT + + Stream data processing + Working process quantification in factory using wearable sensor device and ontology-based stream data processing + + + + + + + + + Stream data processing + IOT + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Esteban Gonzalez + + + + + 21e27cb7ab09b1bff3892cf43b13f33164c93811 + Esteban Gonzalez + + Esteban Gonzalez + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Semantic labeling: A domain-independent approach + 2016-10-21T16:30:00 + + + + Semantic labeling: A domain-independent approach + 2016-10-21T16:10:00 + 2016-10-21T16:30:00 + 2016-10-21T16:10:00 + 2016-10-21T16:30:00 + Minh Pham, Suresh Alse, Craig Knoblock and Pedro Szekely + 2016-10-21T16:30:00 + + + + + + + + + ISEP, BILab + + + + + + ISEP, BILab + + ISEP, BILab + + + + + + + + + + + + + + + + + data integration + A RDF based Portal of Biological Phenotype Data produced in Japan + + Biological phenotype + We developed RDF-based databases of phenotype and animal strains produced in Japan and a portal site termed as “J-Phenome”. By the application of common schema, these databases can be retrieved by the same SPARQL query across graphs. In the operation of these databases, RDF represented multiple advantages such as improvement of comprehensive search, data integration using ontologies and public data, reuse of data and wider dissemination of phenotype data compared to conventional technologies. + + + + + + + RDF + RDF + + + + + Biological phenotype + + + + We developed RDF-based databases of phenotype and animal strains produced in Japan and a portal site termed as “J-Phenome”. By the application of common schema, these databases can be retrieved by the same SPARQL query across graphs. 
In the operation of these databases, RDF represented multiple advantages such as improvement of comprehensive search, data integration using ontologies and public data, reuse of data and wider dissemination of phenotype data compared to conventional technologies. + data integration + A RDF based Portal of Biological Phenotype Data produced in Japan + + + + + + + A RDF based Portal of Biological Phenotype Data produced in Japan + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Accenture Technology Labs + + + + Accenture Technology Labs + Accenture Technology Labs + + + + + + + + + + + + + + + + 2016-10-20T13:30:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + Search (I) + 2016-10-20T14:50:00 + Search (I) + 2016-10-20T13:30:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vienna University of Economy and Business (WU) + Vienna University of Economy and Business (WU) + + + + + + + + + Vienna University of Economy and Business (WU) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + IRIT + + + + IRIT + IRIT + + + + + + + + + + + + + + Makoto Nakatsuji + 748dfd31068eb8b775edb25f2594071336437b28 + Makoto Nakatsuji + + Makoto Nakatsuji + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 3bc2893aa0872907eadc78ded2963c3fa0653366 + + + + + + Mikhail Roshchin + + Mikhail Roshchin + + + Mikhail Roshchin + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Directed Acyclic Graph(DAG) data is increasingly available on the Web, including Linked Open Data(LOD). 
Mining reachability relationships between entities is an important task for extracting knowledge from LOD. Diverse labeling schemes have been proposed to efficiently determine the reachability. We focus on a state-of-the-art 2-hop labeling scheme that is based on a permutation of vertices to achieve a linear index size and reduce on-line searches that are required when the reachability cannot be answered by 2-hop labels only. We observed that the approach can be improved in three different ways; 1) space-efficiency - guarantee the minimized index size without randomness 2) update-efficiency - update labels efficiently when graphs changes 3) parallelization - labeling should be cluster-based, and solved in a distributed fashion. In these regards, this PhD thesis proposes optimization techniques that address these issues. In this paper in particular, a way of reducing the 2-hop label size is proposed with preliminary results on real-world DAG datasets. In addition, we will discuss the feasibilities of the other issues based on our on-going works. + Linked Open Data + Directed Acyclic Graph(DAG) data is increasingly available on the Web, including Linked Open Data(LOD). Mining reachability relationships between entities is an important task for extracting knowledge from LOD. Diverse labeling schemes have been proposed to efficiently determine the reachability. We focus on a state-of-the-art 2-hop labeling scheme that is based on a permutation of vertices to achieve a linear index size and reduce on-line searches that are required when the reachability cannot be answered by 2-hop labels only. We observed that the approach can be improved in three different ways; 1) space-efficiency - guarantee the minimized index size without randomness 2) update-efficiency - update labels efficiently when graphs changes 3) parallelization - labeling should be cluster-based, and solved in a distributed fashion. 
In these regards, this PhD thesis proposes optimization techniques that address these issues. In this paper in particular, a way of reducing the 2-hop label size is proposed with preliminary results on real-world DAG datasets. In addition, we will discuss the feasibilities of the other issues based on our on-going works. + doctoralconsortium-2-hop labeling + + + Linked Open Data + LOD + Optimization Techniques for 2-hop Labeling of Dynamic Directed Acyclic Graphs + Directed Acyclic Graph + Optimization Techniques for 2-hop Labeling of Dynamic Directed Acyclic Graphs + LOD + + DAG + + graph reachability + + DAG + + doctoralconsortium-2-hop labeling + + + Optimization Techniques for 2-hop Labeling of Dynamic Directed Acyclic Graphs + + + graph reachability + + + Directed Acyclic Graph + + + + + + + + + + + + + + + + + + + Paul Groth + + Paul Groth + + + + Paul Groth + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Using Semantic Web Technologies for Explaining and Predicting Abnormal Expense + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Freddy Lecue, John Vard and Jiewen Wu + 2016-10-19T21:00:00 + + Using Semantic Web Technologies for Explaining and Predicting Abnormal Expense + + + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Michael Martin + + Michael Martin + + + + Michael Martin + + 577bfc91a74510adea7fbbb5686629985f5c6864 + + + + + + + + + + 2016-10-19T12:20:00 + 2016-10-19T14:00:00 + 2016-10-19T14:00:00 + 2016-10-19T14:00:00 + 2016-10-19T14:00:00 + Lunch + Lunch + 2016-10-19T12:20:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pistoia Alliance + + + + + Pistoia Alliance + Pistoia Alliance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + A Web Application to Search a Large Repository of Taxonomic Relations from the Web + A Web Application to Search a Large Repository 
of Taxonomic Relations from the Web + + + 2016-10-19T21:00:00 + Stefano Faralli, Christian Bizer, Kai Eckert, Robert Meusel and Simone Paolo Ponzetto + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kai Eckert + a123c0989154667c08f2ca1814cb0e750c514cfd + + + + + Kai Eckert + + + + + + Kai Eckert + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Octavian Rinciog + Octavian Rinciog + + + + + + + + + Octavian Rinciog + + 44b2319b9be44404698a32a105b1c10b6287d435 + + + + + + + + + + + + + + Achille Fokoue + 42e105e76a4873077b7c63e06d51946a89f0eafd + + Achille Fokoue + + + + + + + Achille Fokoue + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Insight Centre for Data Analytics, National University of Ireland + + + Insight Centre for Data Analytics, National University of Ireland + + + + + + Insight Centre for Data Analytics, National University of Ireland + + + + + + + + + + + + + + + + + + + + + Ran Yu + + + Ran Yu + + + + + + + + + + Ran Yu + cbf775d617898b6bf5237bf149e3ca57a5a1e8f8 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + triple pattern fragments + federated querying + linked data + + Linked Data can be distributed through multiple interfaces on the Web, +each of them with their own expressivity. +However, there is no generic client available that can handle querying over multiple interfaces. +This increases the complexity of combining datasets and designing new interfaces. +One can imagine the difficulties that arise +when trying to create a client querying various interfaces at the same time, +that can be discovered just in time. +To this end, I aim to design a generic Linked Data querying engine +capable of handling different interfaces that can easily be extended. 
+Rule-based reasoning is going to be explored +to combine different interfaces without intervention of a human developer. +Using an iterative approach to extend Linked Data interfaces, +I am going to evaluate different querying set-ups for the SPARQL language. +Preliminary results indicate a broad spectrum of yet to be explored options. +As the PhD is still in an early phase, we hope to narrow the scope in the next months, +based on feedback of the doctoral consortium. + linked data fragments + + reasoning + linked data fragments + sparql + sparql + Querying Distributed Heterogeneous Linked Data Interfaces through Reasoning + + + + + + triple pattern fragments + Querying Distributed Heterogeneous Linked Data Interfaces through Reasoning + linked data + + reasoning + + + federated querying + + Querying Distributed Heterogeneous Linked Data Interfaces through Reasoning + + Linked Data can be distributed through multiple interfaces on the Web, +each of them with their own expressivity. +However, there is no generic client available that can handle querying over multiple interfaces. +This increases the complexity of combining datasets and designing new interfaces. +One can imagine the difficulties that arise +when trying to create a client querying various interfaces at the same time, +that can be discovered just in time. +To this end, I aim to design a generic Linked Data querying engine +capable of handling different interfaces that can easily be extended. +Rule-based reasoning is going to be explored +to combine different interfaces without intervention of a human developer. +Using an iterative approach to extend Linked Data interfaces, +I am going to evaluate different querying set-ups for the SPARQL language. +Preliminary results indicate a broad spectrum of yet to be explored options. +As the PhD is still in an early phase, we hope to narrow the scope in the next months, +based on feedback of the doctoral consortium. 
+ + + + 2016-10-20T14:30:00 + + + 2016-10-20T14:30:00 + + Lei Zhang and Achim Rettinger + A Knowledge Base Approach to Cross-lingual Keyword Query Interpretation + 2016-10-20T14:10:00 + 2016-10-20T14:30:00 + 2016-10-20T14:10:00 + 2016-10-20T14:30:00 + A Knowledge Base Approach to Cross-lingual Keyword Query Interpretation + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T17:00:00 + + Joachim Van Herwegen + 2016-10-18T17:00:00 + 2016-10-18T17:00:00 + 2016-10-18T16:45:00 + 2016-10-18T17:00:00 + Querying Distributed Heterogeneous Linked Data Interfaces through Reasoning + + 2016-10-18T16:45:00 + + Querying Distributed Heterogeneous Linked Data Interfaces through Reasoning + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T11:40:00 + 2016-10-19T11:20:00 + Ontologies for Knowledge Graphs: Breaking the Rules + Ontologies for Knowledge Graphs: Breaking the Rules + 2016-10-19T11:20:00 + 2016-10-19T11:40:00 + Markus Krötzsch and Veronika Thost + + 2016-10-19T11:40:00 + + 2016-10-19T11:40:00 + + + + + + + + + + + + + + + + + Linked data has the potential of interconnecting data from different domains, bringing new potentials to machine agents to provide better services for web users. The ever increasing amount of linked data in government open data, social linked data, linked medical and patients’ data provides new opportuni-ties for data mining and machine learning. Both are however strongly de-pendent on the selection of high quality data features to achieve good results. In this work we present an approach that uses ontological knowledge to gen-erate features that are suitable for building a decision tree classifier address-ing the specific data set and classification problem. The approach that we present has two main characteristics - it generates new features on demand as required by the induction algorithm and uses ontological knowledge about linked data to restrict the set of possible options. 
These two characteristics enable the induction algorithm to look for features that might be connected through many entities in the linked data enabling the generation of cross-domain explanation models. + + + linked data + semantic relatedness + + + Feature Generation using Ontologies during Induction of Decision Trees on Linked Data + Hoeffding bound + feature generation + Hoeffding tree + ontology + + + semantic relatedness + + + RDF + Hoeffding tree + ontology + Feature Generation using Ontologies during Induction of Decision Trees on Linked Data + + linked data + RDF + Feature Generation using Ontologies during Induction of Decision Trees on Linked Data + + + feature generation + Hoeffding bound + decision tree + Linked data has the potential of interconnecting data from different domains, bringing new potentials to machine agents to provide better services for web users. The ever increasing amount of linked data in government open data, social linked data, linked medical and patients’ data provides new opportuni-ties for data mining and machine learning. Both are however strongly de-pendent on the selection of high quality data features to achieve good results. In this work we present an approach that uses ontological knowledge to gen-erate features that are suitable for building a decision tree classifier address-ing the specific data set and classification problem. The approach that we present has two main characteristics - it generates new features on demand as required by the induction algorithm and uses ontological knowledge about linked data to restrict the set of possible options. These two characteristics enable the induction algorithm to look for features that might be connected through many entities in the linked data enabling the generation of cross-domain explanation models. 
+ + decision tree + + + + + + + + + + + + + + + + Lorenz Bühmann + + + Lorenz Bühmann + + Lorenz Bühmann + 6ee6fbd8b4b0d98ca0f572ec43bfee4b3e699f9c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + An On-Line Learning to Query System + 2016-10-19T21:00:00 + An On-Line Learning to Query System + 2016-10-19T18:00:00 + + Jędrzej Potoniec + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + + + + + b0dd6a6493ec27189fb906b672d38f57a02776a9 + Özgür Lütfü Özcep + + + + + + Özgür Lütfü Özcep + Özgür Lütfü Özcep + + + + + + + + + + + + + Cyril Chapellier + d3bc2053f4cc681a54c86a72e598db673559ef2f + + Cyril Chapellier + Cyril Chapellier + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Client-server trade-offs can be analyzed using Linked Data Fragments, +which proposes an uniform view on all interfaces to rdf. This reveals a complete +spectrum between Linked Data documents and the sparql protocol, in which +we can advance the state-of-the-art of Linked Data publishing. This axis can be +explored in the following two dimensions: i) Selector, allowing different, more +complex questions for the server; and ii) Metadata, extending the response +with more information clients can use. +This work studies the second Metadata dimension in a practical Web context. +Considering the conditions on the Web, this problem becomes three-fold. First, +analog to the Web itself, ldf interfaces should exist in a distributed, scalable +manner in order to succeed. Generating additional metadata introduces overhead +on the server, which influences the ability to scale towards multiple clients. Second, +the communication between client and server uses the http protocol. Modeling, +serialization, and compression determine the extra load the overall network traffic. 
+Third, with query execution on the client, novel approaches need to apply this +metadata intelligently to increase efficiency. +Concretely, this work defines and evaluates a series of transparent, interchangeable, +and discoverable interface features. We proposed Triple Pattern Fragments, a Linked Data api with low-server cost, as a fundamental base . This +interface uses a single triple pattern as selector. To explore this research space, +we append this interface with different metadata, starting with an estimated +number of total matching triples. By combining several tpfs, sparql queries are +evaluated on the client-side, using the metadata for optimization. Hence, we can +measure the query execution + + + Metadata + Client-server trade-offs can be analyzed using Linked Data Fragments, +which proposes an uniform view on all interfaces to rdf. This reveals a complete +spectrum between Linked Data documents and the sparql protocol, in which +we can advance the state-of-the-art of Linked Data publishing. This axis can be +explored in the following two dimensions: i) Selector, allowing different, more +complex questions for the server; and ii) Metadata, extending the response +with more information clients can use. +This work studies the second Metadata dimension in a practical Web context. +Considering the conditions on the Web, this problem becomes three-fold. First, +analog to the Web itself, ldf interfaces should exist in a distributed, scalable +manner in order to succeed. Generating additional metadata introduces overhead +on the server, which influences the ability to scale towards multiple clients. Second, +the communication between client and server uses the http protocol. Modeling, +serialization, and compression determine the extra load the overall network traffic. +Third, with query execution on the client, novel approaches need to apply this +metadata intelligently to increase efficiency. 
+Concretely, this work defines and evaluates a series of transparent, interchangeable, +and discoverable interface features. We proposed Triple Pattern Fragments, a Linked Data api with low-server cost, as a fundamental base . This +interface uses a single triple pattern as selector. To explore this research space, +we append this interface with different metadata, starting with an estimated +number of total matching triples. By combining several tpfs, sparql queries are +evaluated on the client-side, using the metadata for optimization. Hence, we can +measure the query execution + Studying Metadata for better client-server trade-offs in Linked Data publishing + + Studying Metadata for better client-server trade-offs in Linked Data publishing + Linked Data Fragments + + + + Linked Data Fragments + + Studying Metadata for better client-server trade-offs in Linked Data publishing + Semantic Web + Semantic Web + + Metadata + + + + + + + + + + + + + + + + + + + + + + + Roger Nkambou + + + + Roger Nkambou + + e39442f80ef81e65a31cfdfd6d5294baa7d72caf + Roger Nkambou + + + + + 3c84a2d7b88aaaee9abc3f6a599604813ee021d8 + + + + + + + Mark Sandler + Mark Sandler + + + + + Mark Sandler + + + + + + + + + + + + + + + + Heiner Stuckenschmidt + Heiner Stuckenschmidt + + + + + Heiner Stuckenschmidt + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T11:45:00 + 2016-10-18T11:30:00 + 2016-10-18T11:45:00 + Syed Muhammad Ali Hasnain + 2016-10-18T11:45:00 + A - Posteriori Data Integration for Life Sciences + + A - Posteriori Data Integration for Life Sciences + + 2016-10-18T11:45:00 + + 2016-10-18T11:30:00 + + + ef2204ce2cc114efa6aeb83d4fcd4b63605524c7 + Minh-Duc Pham + + + + Minh-Duc Pham + + + + + + + + + Minh-Duc Pham + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + OntoCASE4G-OWL: Towards a modeling software tool for G-OWL 
a visual syntax for RDF/RDFS/OWL2 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + OntoCASE4G-OWL: Towards a modeling software tool for G-OWL a visual syntax for RDF/RDFS/OWL2 + + 2016-10-19T21:00:00 + + Michel Héon, Roger Nkambou and Mohamed Gaha + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Md. Kamruzzaman Sarker + + + a4a440701c140cde7e64e7c3913fd8f99bbad380 + Md. Kamruzzaman Sarker + + + + Md. Kamruzzaman Sarker + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Stefanos Roumeliotis + + + + + + + + + Stefanos Roumeliotis + + + + Stefanos Roumeliotis + + + + + + + + + + + + + RWTH Aachen University + + RWTH Aachen University + + RWTH Aachen University + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kaiser Permanente + + + Kaiser Permanente + Kaiser Permanente + + + + + + + + + + + + + + + + + + + + 23b24b58894d8da006c6c317e320e42431a9ec00 + + Lara S. G. Piccolo + + + Lara S. G. Piccolo + + + + + + Lara S. G. Piccolo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Nicolas Matentzoglu + + + + Nicolas Matentzoglu + + 8bfb88288cdf2930fac4c97945b88d52fec277bc + Nicolas Matentzoglu + + + + + + + + + + + + + + + + + + + Scalable Semantic Access to Siemens Static and Streaming Distributed Data + + + + + Scalable Semantic Access to Siemens Static and Streaming Distributed Data + Numerous analytical tasks in industry rely on data integration solutions since they require data from multiple static and streaming data sources. In the context of the Optique project we have investigated how Semantic Technologies can enhance data integration and thus facilitate further data analysis. We introduced the notion Ontology-Based Stream-Static Data Integration and developed the system Optique to put our ideas in practice. In this demo we will show how Optique can help in diagnostics of power generating turbines in Siemens Energy. 
For this purpose we prepared anonymised streaming and static data from 950 Siemens power generating turbines with more than 100,000 sensors and deployed Optique on distributed environments with 128 nodes. The demo attendees will be able to see do diagnostics of turbines by registering and monitoring continuous queries that combine streaming and static data; to test scalability of our devoted stream management system that is able to process up to 1024 concurrent complex diagnostic queries with a 10 TB/day throughput; and to deploy Optique over Siemens demo data using our devoted interactive system to create abstraction semantic layers over data sources. + + + ontologies + streaming data + + + + + + + + + Numerous analytical tasks in industry rely on data integration solutions since they require data from multiple static and streaming data sources. In the context of the Optique project we have investigated how Semantic Technologies can enhance data integration and thus facilitate further data analysis. We introduced the notion Ontology-Based Stream-Static Data Integration and developed the system Optique to put our ideas in practice. In this demo we will show how Optique can help in diagnostics of power generating turbines in Siemens Energy. For this purpose we prepared anonymised streaming and static data from 950 Siemens power generating turbines with more than 100,000 sensors and deployed Optique on distributed environments with 128 nodes. The demo attendees will be able to see do diagnostics of turbines by registering and monitoring continuous queries that combine streaming and static data; to test scalability of our devoted stream management system that is able to process up to 1024 concurrent complex diagnostic queries with a 10 TB/day throughput; and to deploy Optique over Siemens demo data using our devoted interactive system to create abstraction semantic layers over data sources. 
+ + + + streaming data + + + + + + data access + + + + + static data + + + + + + + + + + + + + + + + static data + data access + Scalable Semantic Access to Siemens Static and Streaming Distributed Data + ontologies + + + + + + + + + + + + + + + + + + + + + + + + + + + Houda Khrouf + + + + Houda Khrouf + + + 9972a88dceb3a990213b7781ba64f10a806c2200 + + + + Houda Khrouf + + + + + + + + + + + + + + + + + + + + + + DASPLab, DISI, University of Bologna + + DASPLab, DISI, University of Bologna + DASPLab, DISI, University of Bologna + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + An Evaluation of VIG with the BSBM Benchmark + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + + An Evaluation of VIG with the BSBM Benchmark + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Davide Lanti, Guohui Xiao and Diego Calvanese + 2016-10-19T21:00:00 + + + + Francesco Osborne + + + + + Francesco Osborne + + + + + + + Francesco Osborne + 5c98580f6804602e7b7102cf99078e9f1a1a31e6 + + + + + + + + + + + + + + + + + + + + + + + + + + + flight planning + GTFS + + + + data enrichment + + data enrichment + flight planning + + + + + When searching for flights, current systems often suggest routes involving waiting times at stopovers. There might exist alternative routes which are more attractive from a touristic perspective because their duration is not necessarily much longer while offering enough time in an appropriate place. Choosing among such alternatives requires additional planning efforts to make sure that e.g. points of interest can conveniently be reached in the allowed time frame. We present a system that automatically computes smart trip alternatives between any two cities. To do so, it searches points of interest in large semantic datasets considering the set of accessible areas around each possible layover. It then elects feasible alternatives and displays their differences with respect to the default trip. 
+ Smart Trip Alternatives for the Curious + GTFS + + Smart Trip Alternatives for the Curious + POIs + POIs + + + + Smart Trip Alternatives for the Curious + stopovers + + + + + + When searching for flights, current systems often suggest routes involving waiting times at stopovers. There might exist alternative routes which are more attractive from a touristic perspective because their duration is not necessarily much longer while offering enough time in an appropriate place. Choosing among such alternatives requires additional planning efforts to make sure that e.g. points of interest can conveniently be reached in the allowed time frame. We present a system that automatically computes smart trip alternatives between any two cities. To do so, it searches points of interest in large semantic datasets considering the set of accessible areas around each possible layover. It then elects feasible alternatives and displays their differences with respect to the default trip. + stopovers + + + + + + + + + + + + + + + + + FZI Research Center for Information Technology + + + + FZI Research Center for Information Technology + + + + FZI Research Center for Information Technology + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pierre Geneves + + 1079cfa61e43a4a7fb5b3907d03bd3024570f566 + Pierre Geneves + + + + + + + + Pierre Geneves + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University Mannheim + University Mannheim + + + + University Mannheim + + + + + + + + Semantic Audit Application + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Katalin Ternai and Ildikó Szabó + Semantic Audit Application + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Changlong Wang + + Changlong Wang + + + + + 2c1a762edfc2413f3c922af78e176c30070a1b30 + + + 
+ + + + Changlong Wang + + + Antske Fokkens + + + + + + + + + + Antske Fokkens + + + Antske Fokkens + + + + + + + + Adila A. Krisnadhi + 76fe98c9de14916373ed11a039db06c7fa0c9429 + + + + + + + Adila A. Krisnadhi + Adila A. Krisnadhi + + + + Record Linkage + + Record Linkage + + Unsupervised Entity Resolution on Multi-type Graphs + + Entity resolution + + + We address the problem of performing entity resolution on RDF graphs containing multiple types of nodes, using the links between instances of different types to improve the accuracy. For example, in a graph of products and manufacturers the goal is to resolve all the products and all the manufacturers. We formulate this problem as multi-type graph summarization problem, which involves clustering the nodes in each type that refer to the same entity into one super node and creating weighted links among super nodes that summarize the inter-cluster links in the original graph. Experiments show that the proposed approach outperforms several state-of-the-art generic entity resolution approaches, especially in data sets with one-to-many, many-to-many relations and attributes with missing values. + + + + + Entity resolution + + + + + + Multi-type Graph Summarization + Unsupervised Entity Resolution on Multi-type Graphs + Unsupervised Entity Resolution on Multi-type Graphs + + + + + + We address the problem of performing entity resolution on RDF graphs containing multiple types of nodes, using the links between instances of different types to improve the accuracy. For example, in a graph of products and manufacturers the goal is to resolve all the products and all the manufacturers. We formulate this problem as multi-type graph summarization problem, which involves clustering the nodes in each type that refer to the same entity into one super node and creating weighted links among super nodes that summarize the inter-cluster links in the original graph. 
Experiments show that the proposed approach outperforms several state-of-the-art generic entity resolution approaches, especially in data sets with one-to-many, many-to-many relations and attributes with missing values. + + Multi-type Graph Summarization + + + + + + + + + + + + + + + + + + + + + + + + + + + + Hatim Aouzal + + + + Hatim Aouzal + + 4f8c57c009a4c0d4755d8fe3d0a85e4a8269a9b3 + + + Hatim Aouzal + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + STLab (ISTC-CNR) + + + + + + + STLab (ISTC-CNR) + + STLab (ISTC-CNR) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T14:30:00 + Ontop: Answering SPARQL queries over relational databases + Ontop: Answering SPARQL queries over relational databases + 2016-10-21T14:30:00 + 2016-10-21T14:10:00 + + + 2016-10-21T14:10:00 + 2016-10-21T14:30:00 + Diego Calvanese, Benjamin Cogrel, Sarah Komla-Ebri, Roman Kontchakov, Davide Lanti, Martin Rezk, Mariano Rodriguez-Muro and Guohui Xiao + + 2016-10-21T14:30:00 + + + + d7c46260e8a7f0570bb8cabc64fa362addda2b20 + + + + + + + + + + Ujwal Gadiraju + + Ujwal Gadiraju + Ujwal Gadiraju + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Masao Watanabe + + + Masao Watanabe + + + + + + + Masao Watanabe + + 70b7ce67759053909d6ed6b3af898c21c97f12f3 + + + + Pascal Hitzler + + + + + Pascal Hitzler + d9d5e01de07e6f9a5e8b66c44c995c5ca8cc3b63 + + + + Pascal Hitzler + + + + + + + + + + + + + + + + + + + + + + + + + + + + + e4f813d7844475a45b91ef78eeacea4b0faa4168 + Denis Parra + + Denis Parra + + + + + + + + + Denis Parra + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Bruno Charron + + + + + + Bruno Charron + + + + + Bruno Charron + + + fe57be327dcfcd3aa84114619f16cf5ed3657892 + + + Imen Megdiche + + + + + + + + Imen Megdiche + Imen Megdiche + + + 632b5e521bc9e23267093100ca3bb1a4612a5048 + + + + + Filippo Gramegna + + + + Filippo Gramegna + Filippo 
Gramegna + + 37a7ac9099047adc44c3d1e9da64ad367949e333 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Jun Ding + + + + + + + Jun Ding + 219e99d828b66428ae2ae8e1835e4a71a59898c1 + + + + Jun Ding + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Discovering and Using Functions via Content Negotiation + 2016-10-19T21:00:00 + Discovering and Using Functions via Content Negotiation + 2016-10-19T18:00:00 + Ben De Meester, Anastasia Dimou, Ruben Verborgh, Erik Mannens and Rik Van de Walle + + + + + + + + + + + + + + + + + + + + + + + + + + + + e77e0c204cbd1f67750ab520e2d2b7bd4b71ebcc + Frank Van Harmelen + + + + + + + + Frank Van Harmelen + + + Frank Van Harmelen + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Information extraction + + Knowledge base generation + + Knowledge base generation + + Relation extraction + Wikipedia + Relation extraction + + + Wikipedia has been the primary source of information for many automatically-generated Semantic Web data sources. However, they suffer from incompleteness since they largely do not cover information contained in the unstructured texts of Wikipedia. Our goal is to extract structured entity-relationships in RDF from such unstructured texts, ultimately using them to enrich existing data sources. Our extraction technique is aimed to be topic-independent, leveraging grammatical dependency of sentences and context semantic refinement. Preliminary evaluations of the proposed approach has shown some promising results. 
+ Information extraction + Entity-Relationship Extraction from Wikipedia Unstructured Text + Entity-Relationship Extraction from Wikipedia Unstructured Text + + + + + + + + + Entity-Relationship Extraction from Wikipedia Unstructured Text + Wikipedia + Wikipedia has been the primary source of information for many automatically-generated Semantic Web data sources. However, they suffer from incompleteness since they largely do not cover information contained in the unstructured texts of Wikipedia. Our goal is to extract structured entity-relationships in RDF from such unstructured texts, ultimately using them to enrich existing data sources. Our extraction technique is aimed to be topic-independent, leveraging grammatical dependency of sentences and context semantic refinement. Preliminary evaluations of the proposed approach has shown some promising results. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Domain Adaptation for Ontology Localization + John P. McCrae, Mihael Arcan, Kartik Asooja, Jorge Gracia, Paul Buitelaar and Philipp Cimiano + + Domain Adaptation for Ontology Localization + 2016-10-19T15:00:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + + 2016-10-19T15:00:00 + + + + + + + + + + + + + + + + + + + + + + + + Ghent University + + + + + + + Ghent University + Ghent University + + + Alvaro A. A. Fernandes + 2676f098258b7af5f2554fc4b41de7cd3d8a7dfb + + + + Alvaro A. A. Fernandes + + + + Alvaro A. A. 
Fernandes + + + + + + + + + + + + + + + + + + Wendy Hall + 27ee74540695fd57411b7614bc1826fd8e2bed4e + + + + + Wendy Hall + + Wendy Hall + + + + + + fd46b21a039a8af1044cab3a963a6a86f987cd7c + + + + Jędrzej Potoniec + + Jędrzej Potoniec + + + + Jędrzej Potoniec + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Victor Christen, Anika Groß and Erhard Rahm + + 2016-10-21T14:10:00 + 2016-10-21T13:50:00 + 2016-10-21T14:10:00 + 2016-10-21T14:10:00 + + + A Reuse-based Annotation Approach for Medical Documents + 2016-10-21T14:10:00 + 2016-10-21T13:50:00 + A Reuse-based Annotation Approach for Medical Documents + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Markus Freudenberg + + + Markus Freudenberg + + Markus Freudenberg + da0ea91b05a76afe4b955963e44225cbbca27b6e + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + National Institute of Informatics + + + + + National Institute of Informatics + National Institute of Informatics + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ontology alignment + + + + + + + + User validation is one of the challenges facing the ontology alignment community, as there are limits to the quality of automated alignment algorithms. +In this paper we present a broad study on user validation of ontology alignments that encompasses three distinct but interrelated aspects: the profile of the user, the services of the alignment system, and its user interface. We discuss key issues pertaining to the alignment validation process under each of these aspects, and provide an overview of how current systems address them. Finally, we use experiments from the Interactive Matching track of the Ontology Alignment Evaluation Initiative (OAEI) 2015 to assess the impact of errors in alignment validation, and how systems cope with them as function of their services. 
+ ontology engineering + + + User validation is one of the challenges facing the ontology alignment community, as there are limits to the quality of automated alignment algorithms. +In this paper we present a broad study on user validation of ontology alignments that encompasses three distinct but interrelated aspects: the profile of the user, the services of the alignment system, and its user interface. We discuss key issues pertaining to the alignment validation process under each of these aspects, and provide an overview of how current systems address them. Finally, we use experiments from the Interactive Matching track of the Ontology Alignment Evaluation Initiative (OAEI) 2015 to assess the impact of errors in alignment validation, and how systems cope with them as function of their services. + + + + + + user interaction + + User validation in ontology alignment + + User validation in ontology alignment + + + User validation in ontology alignment + + user interaction + ontology engineering + + ontology alignment + + + + + + + + + + + Khadija Elbedweihy + + + Khadija Elbedweihy + + + + Khadija Elbedweihy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Gerben Klaas Dirk de Vries + + + Gerben Klaas Dirk de Vries + Gerben Klaas Dirk de Vries + + + + 7b796d33ced4b93447091b2f177ec6a8ba044760 + + + + + + + + + + 54cd3a48f7c3c0d893b06f7de03571968e8d474c + + Jürgen Umbrich + + Jürgen Umbrich + + + Jürgen Umbrich + + + + + + + + + + + + + + Colette Menard + + Colette Menard + + + edf8c59dab500fdfa3415ac72d373057771a7cef + + Colette Menard + + + + + + + + + + + + + + + + + + + + 2d5620c54c5a7a5add5d943131bd53bc99bc0097 + Jooik Jung + + + Jooik Jung + Jooik Jung + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Atul Nautiyal + + + + + + 38a67c25e0d2cf2a2159067aafda02685b4136ab + Atul Nautiyal + Atul Nautiyal + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + Laura Slaughter + + + + + + Laura Slaughter + Laura Slaughter + + + + + + University of Southampton + University of Southampton + + + + + + University of Southampton + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Scalable + User-friendly + + Smart Office + + + + User-Friendly and Scalable Platform for the Design of Intelligent IoT Services: a Smart Office Use Case + + + + User-Friendly and Scalable Platform for the Design of Intelligent IoT Services: a Smart Office Use Case + Internet of Things (IoT) + + + + + + User-friendly + The Internet of Things (IoT) is starting to take a prevalent role in our daily lives. Smart offices that automatically adapt their environment to make life at the office as pleasant as possible, are slowly becoming reality. +In this paper we present a user-friendly semantic-based smart office platform that allows, through easy configuration, a personalized and comfortable experience at the office. + The Internet of Things (IoT) is starting to take a prevalent role in our daily lives. Smart offices that automatically adapt their environment to make life at the office as pleasant as possible, are slowly becoming reality. +In this paper we present a user-friendly semantic-based smart office platform that allows, through easy configuration, a personalized and comfortable experience at the office. 
+ + Scalable + + + Smart Office + Internet of Things (IoT) + + + + Semantic Platform + User-Friendly and Scalable Platform for the Design of Intelligent IoT Services: a Smart Office Use Case + + Semantic Platform + + + + + + + + + + + + + + + + 2016-10-18T16:15:00 + 2016-10-18T16:30:00 + 2016-10-18T16:15:00 + 2016-10-18T16:30:00 + + 2016-10-18T16:30:00 + Valentina Ivanova + 2016-10-18T16:30:00 + + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + + + + Sapienza Università di Roma + + + Sapienza Università di Roma + + + + Sapienza Università di Roma + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Carlos Pedrinaci + + + Carlos Pedrinaci + + + Carlos Pedrinaci + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Japan Science and Technology Agency + + + + + Japan Science and Technology Agency + + + + Japan Science and Technology Agency + + + + + + + + + + + + + + + + distributional semantic representations + word sense disambiguation + + + + + + Linked Disambiguated Distributional Semantic Networks + + lexical-semantic networks + + + distributional semantic representations + + We present a new hybrid knowledge base that combines the contextual information of distributional models with the conciseness and precision of manually constructed lexical networks. In contrast to dense vector representations, our resource is human readable and interpretable, and can be easily embedded within the Semantic Web ecosystem. Manual evaluation based on human judgments and an extrinsic evaluation on the task of Word Sense Disambiguation both indicate the high quality of the resource, as well as the benefits of enriching top-down lexical knowledge resources with bottom-up distributional information from text. 
+ + + + lexical-semantic networks + + distributional model + + distributional model + We present a new hybrid knowledge base that combines the contextual information of distributional models with the conciseness and precision of manually constructed lexical networks. In contrast to dense vector representations, our resource is human readable and interpretable, and can be easily embedded within the Semantic Web ecosystem. Manual evaluation based on human judgments and an extrinsic evaluation on the task of Word Sense Disambiguation both indicate the high quality of the resource, as well as the benefits of enriching top-down lexical knowledge resources with bottom-up distributional information from text. + + Linked Disambiguated Distributional Semantic Networks + + word sense disambiguation + + + Linked Disambiguated Distributional Semantic Networks + + + + + + + + + + + + + 011c9ba2e167393ebc4af29761b38eee5c02823f + Luca Costabello + + Luca Costabello + + Luca Costabello + + + + + Aldo Gangemi + Aldo + Gangemi + Aldo Gangemi + + Aldo Gangemi + + + + + + 8d7f004803b48a3b7c5e9f73dc16953069a6632d + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Nozomu Ohshiro + Nozomu Ohshiro + + + + + + + + + Nozomu Ohshiro + + b25acd7694393c08b907299266201b69e16ed06c + + + + + Oscar Corcho + + Oscar Corcho + Oscar Corcho + + + + + efbd90eca236ae3131e67f30e3abe0a1bceff305 + + + + + + + + + + Recently, a growing number of linguistic resources in different languages have been published and interlinked as part of the Linguistic Linked Open Data (LLOD) cloud. However, in comparison to English and other prominent languages, the presence of Chinese in such a cloud is still limited, despite the fact that Chinese is the most spoken language worldwide. 
Publishing more Chinese language resources in the LLOD cloud can benefit both academia and industry to better understand the language itself and to further build multilingual applications that will improve the flow of data and services across countries. In this paper, we describe Zhishi.lemon, a newly developed dataset based on the lemon model that constitutes the lexical realization of Zhishi.me, one of the largest Chinese datasets in the Linked Open Data (LOD) cloud. Zhishi.lemon combines the lemon core with the lemon translation module in order to build a linked data lexicon in Chinese with translations into Spanish and English. Links to BabelNet (a vast multilingual encyclopedic resource) have been provided as well. We also present a showcase of this module along with the technical details of transforming Zhishi.me to Zhishi.lemon. We have made the dataset accessible on the Web for both humans (via a Web interface) and software agents (with a SPARQL endpoint). + + + multilingualism + linked data + + Zhishi.lemon:On Publishing Zhishi.me as Linguistic Linked Open Data + + + + + + + + linked data + + translation + Recently, a growing number of linguistic resources in different languages have been published and interlinked as part of the Linguistic Linked Open Data (LLOD) cloud. However, in comparison to English and other prominent languages, the presence of Chinese in such a cloud is still limited, despite the fact that Chinese is the most spoken language worldwide. Publishing more Chinese language resources in the LLOD cloud can benefit both academia and industry to better understand the language itself and to further build multilingual applications that will improve the flow of data and services across countries. In this paper, we describe Zhishi.lemon, a newly developed dataset based on the lemon model that constitutes the lexical realization of Zhishi.me, one of the largest Chinese datasets in the Linked Open Data (LOD) cloud. 
Zhishi.lemon combines the lemon core with the lemon translation module in order to build a linked data lexicon in Chinese with translations into Spanish and English. Links to BabelNet (a vast multilingual encyclopedic resource) have been provided as well. We also present a showcase of this module along with the technical details of transforming Zhishi.me to Zhishi.lemon. We have made the dataset accessible on the Web for both humans (via a Web interface) and software agents (with a SPARQL endpoint). + multilingualism + + translation + Zhishi.lemon:On Publishing Zhishi.me as Linguistic Linked Open Data + + + + + + + Zhishi.lemon:On Publishing Zhishi.me as Linguistic Linked Open Data + + + + + + 2016-10-19T15:20:00 + 2016-10-19T15:50:00 + 2016-10-19T15:50:00 + 2016-10-19T15:50:00 + Coffee Break + 2016-10-19T15:50:00 + 2016-10-19T15:20:00 + Coffee Break + + + f71357f477f9eac84aafffd63b00f94ef3262ed8 + + + + + + + + + + Mariano Mora-Mcginity + Mariano Mora-Mcginity + Mariano Mora-Mcginity + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Climate Change + Semantic Collaborative Platforms + EnergyUse - A Collaborative Semantic Platform for Monitoring and Discussing Energy Consumption + EnergyUse - A Collaborative Semantic Platform for Monitoring and Discussing Energy Consumption + Energy Monitors + + Conserving fossil-based energy to reduce carbon emissions is key to slowing down global warming. The 2015 Paris agreement on climate change emphasised the importance of raising public awareness and participation to address this societal challenge. In this paper we introduce EnergyUse; a social and collaborative platform for raising awareness on climate change, by enabling users to view and compare the actual energy consumption of various appliances, and to share and discuss energy conservation tips in an open and social environment. 
The platform collects data from smart plugs, and exports appliance consumption information and community generated energy tips as linked data. In this paper we report on the system design, data modelling, platform usage and early deployment with a set of 58 initial participants. We also discuss the challenges, lessons learnt, and future platform developments. + + + Energy Consumption + + EnergyUse - A Collaborative Semantic Platform for Monitoring and Discussing Energy Consumption + Conserving fossil-based energy to reduce carbon emissions is key to slowing down global warming. The 2015 Paris agreement on climate change emphasised the importance of raising public awareness and participation to address this societal challenge. In this paper we introduce EnergyUse; a social and collaborative platform for raising awareness on climate change, by enabling users to view and compare the actual energy consumption of various appliances, and to share and discuss energy conservation tips in an open and social environment. The platform collects data from smart plugs, and exports appliance consumption information and community generated energy tips as linked data. In this paper we report on the system design, data modelling, platform usage and early deployment with a set of 58 initial participants. We also discuss the challenges, lessons learnt, and future platform developments. + + + + + + + + Semantic Collaborative Platforms + Climate Change + + Energy Monitors + + + Energy Consumption + + + + + + + + + Keio University + Keio University + + + + + + Keio University + + + + + + + + Kai Lenz + + + + + + Kai Lenz + + Kai Lenz + 1a31d50ae36c97bb6d08ae4adb8c388439eef137 + + + The root of schema violations for RDF data generated from (semi-)structured data, often derives from mappings, which are repeatedly applied and specify how an RDF dataset is generated. The DBpedia dataset, which derives from Wikipedia infoboxes, is no exception. 
To mitigate the violations, we proposed in previous work to validate the mappings which generate the data, instead of validating the generated data afterwards. In this work, we demonstrate how mappings validation is applied to DBpedia. DBpedia mappings are automatically translated to RML and validated by RDFUnit. The DBpedia mappings assessment can be frequently executed, because it requires significantly less time compared to validating the dataset. The validation results become available via a user-friendly interface. The DBpedia community considers them to refine the DBpedia mappings or ontology and thus, increase the dataset quality. + + DBpedia Mappings Quality Assessment + Data Quality + RML + + + DBpedia + + + + RDFUnit + Linked Data Mapping + + + DBpedia Mappings Quality Assessment + + + DBpedia + Data Quality + RDFUnit + + + + + DBpedia Mappings Quality Assessment + + + + + + + The root of schema violations for RDF data generated from (semi-)structured data, often derives from mappings, which are repeatedly applied and specify how an RDF dataset is generated. The DBpedia dataset, which derives from Wikipedia infoboxes, is no exception. To mitigate the violations, we proposed in previous work to validate the mappings which generate the data, instead of validating the generated data afterwards. In this work, we demonstrate how mappings validation is applied to DBpedia. DBpedia mappings are automatically translated to RML and validated by RDFUnit. The DBpedia mappings assessment can be frequently executed, because it requires significantly less time compared to validating the dataset. The validation results become available via a user-friendly interface. The DBpedia community considers them to refine the DBpedia mappings or ontology and thus, increase the dataset quality. 
+ + + + RML + + + + Linked Data Mapping + + + + + + + + + + ISMB + + + + + + ISMB + + + + + ISMB + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + Silvio Peroni, David Shotton and Fabio Vitali + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Jailbreaking your reference lists: the OpenCitations strike again + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + Jailbreaking your reference lists: the OpenCitations strike again + + + + 2016-10-19T10:30:00 + 2016-10-19T11:00:00 + 2016-10-19T11:00:00 + 2016-10-19T11:00:00 + 2016-10-19T11:00:00 + Coffee Break + 2016-10-19T10:30:00 + Coffee Break + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Linked Data generation and publication remain challenging and complicated, in particular for data owners who are not Semantic Web experts or tech-savvy. The situation deteriorates when data from multiple heterogeneous sources, accessed via different interfaces, is integrated, and the Linked Data generation is a long-lasting activity repeated periodically, often adjusted and incrementally enriched with new data. Therefore, we propose the RML Workbench, a graphical user interface to support data owners administrating their Linked Data generation and publication workflow. The RML Workbench’s underlying language is RML, since it allows to declaratively describe the complete Linked Data generation workflow. Thus, any Linked Data generation workflow specified by a user can be exported and reused by other tools interpreting RML. + Linked Data generation and publication remain challenging and complicated, in particular for data owners who are not Semantic Web experts or tech-savvy. 
The situation deteriorates when data from multiple heterogeneous sources, accessed via different interfaces, is integrated, and the Linked Data generation is a long-lasting activity repeated periodically, often adjusted and incrementally enriched with new data. Therefore, we propose the RML Workbench, a graphical user interface to support data owners administrating their Linked Data generation and publication workflow. The RML Workbench’s underlying language is RML, since it allows to declaratively describe the complete Linked Data generation workflow. Thus, any Linked Data generation workflow specified by a user can be exported and reused by other tools interpreting RML. + + + + + Linked Data Generation + + + + + Towards an Interface for User-Friendly Linked Data Generation Administration + + + + + + Towards an Interface for User-Friendly Linked Data Generation Administration + + RML + R2RML + + + + + Linked Data Workbench + Linked Data Workbench + + Towards an Interface for User-Friendly Linked Data Generation Administration + + Linked Data Generation + + RML + + + + R2RML + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Poster and Demo session + 2016-10-19T18:00:00 + Poster and Demo session + + + + + + + + + + + + + + Daria Stepanova + + Daria Stepanova + + e473527846adf331759e7bb54fcfaa1bce63ceff + + Daria Stepanova + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + A Unified Interface for Optimizing Continuous Query in Heterogeneous RDF Stream Processing Systems + + 2016-10-19T21:00:00 + A Unified Interface for Optimizing Continuous Query in Heterogeneous RDF Stream Processing Systems + 2016-10-19T21:00:00 + + Seungjun Yoon, Sejin Chun, Xiongnan Jin and Kyong-Ho Lee + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + 84b76afee6e26404a8a2835641304b1f456acbdd + Martin 
Ringsquandl + + + Martin Ringsquandl + + + + + + Martin Ringsquandl + + + + + + + + Institut de recherche en Électricité d'Hydro-Québec + + + + Institut de recherche en Électricité d'Hydro-Québec + + + + + Institut de recherche en Électricité d'Hydro-Québec + + + + + + + + + + + + + + + + + + + + + + + + + + + + SQL + + SPARQL + + SPARQL-to-SQL on Internet of Things Databases and Streams + + Query Translation + Web of Things + SQL + + + SPARQL + Internet of Things + To realise a semantic Web of Things, the challenge of achieving efficient Resource Description Format (RDF) storage and SPARQL query performance on Internet of Things (IoT) devices with limited resources has to be addressed. State-of-the-art SPARQL-to-SQL engines have been shown to outperform RDF stores on some benchmarks. In this paper, we describe an optimisation to the SPARQL-to-SQL approach, based on a study of time-series IoT data structures, that employs metadata abstraction and efficient translation by reusing existing SPARQL engines to produce Linked Data `just-in-time'. We evaluate our approach against RDF stores, state-of-the-art SPARQL-to-SQL engines and streaming SPARQL engines, in the context of IoT data and scenarios. We show that storage efficiency, with succinct row storage, and query performance can be improved from 2 times to 3 orders of magnitude. + + To realise a semantic Web of Things, the challenge of achieving efficient Resource Description Format (RDF) storage and SPARQL query performance on Internet of Things (IoT) devices with limited resources has to be addressed. State-of-the-art SPARQL-to-SQL engines have been shown to outperform RDF stores on some benchmarks. In this paper, we describe an optimisation to the SPARQL-to-SQL approach, based on a study of time-series IoT data structures, that employs metadata abstraction and efficient translation by reusing existing SPARQL engines to produce Linked Data `just-in-time'. 
We evaluate our approach against RDF stores, state-of-the-art SPARQL-to-SQL engines and streaming SPARQL engines, in the context of IoT data and scenarios. We show that storage efficiency, with succinct row storage, and query performance can be improved from 2 times to 3 orders of magnitude. + SPARQL-to-SQL on Internet of Things Databases and Streams + + + Analytics + + Analytics + Web of Things + + Query Translation + Internet of Things + + SPARQL-to-SQL on Internet of Things Databases and Streams + + + + + + + + + + + + + + + + + + + + + + + + + + + + Claudio Gutierrez + + + + + + + + + + f341588c8c974d3b6fa4134e12700da871e8704f + + Claudio Gutierrez + Claudio Gutierrez + + + + LODStats: The Data Web Census Dataset + + + RDF + + Government Data + + + + Over the past years, the size of the Data Web has increased significantly, which makes obtaining general insights into its growth and structure both more challenging and more desirable. The lack of such insights hinders important data management tasks such as quality, privacy and coverage analysis. In this paper, we present LODStats, which provides a comprehensive picture of the current state of a significant part of the Data Web. LODStats integrates RDF datasets from data.gov, publicdata.eu and datahub.io data catalogs and at the time of writing lists over 9 000 RDF datasets. For each RDF dataset, LODStats collects comprehensive statistics and makes these available in adhering to the LDSO vocabulary. This analysis has been regularly published and enhanced over the past four years at the public platform lodstats.aksw.org. We give a comprehensive overview over the resulting dataset. 
+ + Statistics + Linked Data + Linked Data + + + LOD Cloud + + Statistics + LODStats: The Data Web Census Dataset + + RDF + + + LODStats: The Data Web Census Dataset + + + + Government Data + Over the past years, the size of the Data Web has increased significantly, which makes obtaining general insights into its growth and structure both more challenging and more desirable. The lack of such insights hinders important data management tasks such as quality, privacy and coverage analysis. In this paper, we present LODStats, which provides a comprehensive picture of the current state of a significant part of the Data Web. LODStats integrates RDF datasets from data.gov, publicdata.eu and datahub.io data catalogs and at the time of writing lists over 9 000 RDF datasets. For each RDF dataset, LODStats collects comprehensive statistics and makes these available in adhering to the LDSO vocabulary. This analysis has been regularly published and enhanced over the past four years at the public platform lodstats.aksw.org. We give a comprehensive overview over the resulting dataset. 
+ + + LOD Cloud + + + + + + + + + + + + + + + + + + + + + + + + Barry Clarke + + + + + Barry Clarke + + + 2e8dc1d1d979e478fb826f2738e304f2a8256dab + Barry Clarke + + + + + + + + + + Hiroaki Morikawa + + Hiroaki Morikawa + e8763bb27f9128dd304337155d48c07a4b127402 + Hiroaki Morikawa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ca8dd724a7c73e32093d4950de5a8cf337ad9afe + + + + + + Nicolas Seydoux + + Nicolas Seydoux + + + Nicolas Seydoux + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T16:15:00 + Ontology Refinement and Evaluation System based on Similarity of Is-a Hierarchies + + + 2016-10-18T16:15:00 + Ontology Refinement and Evaluation System based on Similarity of Is-a Hierarchies + 2016-10-18T16:00:00 + 2016-10-18T16:15:00 + 2016-10-18T16:00:00 + + Takeshi Masuda + 2016-10-18T16:15:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Duisburg-Essen + + + + University of Duisburg-Essen + + University of Duisburg-Essen + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + David Carral + + + + + David Carral + David Carral + + d2efb52c6d8a90c7503b795bd12a0292d8f2873c + + + + + + + + + + + + + + + + + + + + + + + + + Université du Québec à Montréal + + Université du Québec à Montréal + + Université du Québec à Montréal + + + Tsinghua University + + + + + Tsinghua University + + + + + + Tsinghua University + + + + + + + + + + + Sadahiro Kumagai + + + + Sadahiro Kumagai + + + + + + 6905b3f66c47f4e446ff715499aed5c90bc3fb0c + + + + Sadahiro Kumagai + + + + + + + + + + Matthew Rowe + + Matthew Rowe + + + + e6a0bcfb9bbf3e9449e805ad14142a114bfcfd68 + Matthew Rowe + + + + East China University of Science and Technology + + East China University of Science and Technology + + + + East China University of Science and Technology + + + + + + + + + + + + + + + + + + + + + + + + Haofen 
Wang + + + + + + + 1f3ad60b6da3589cf3725b33643ea80355f2cb7f + + + Haofen Wang + + + Haofen Wang + + + + + + + + + + + + + + SynerScope B.V. + + + + + + SynerScope B.V. + SynerScope B.V. + + + + + + + + + + + + + + + + + + Vanessa Lopez + + + Vanessa Lopez + + + + + + 5c3ac25297fd6033d663d292004b1e8d977ceb5f + + Vanessa Lopez + + + + + + 931aee58ad9a19b08e6a5e282830add804f5a62a + + Tong Ruan + Tong Ruan + + + + + + + Tong Ruan + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Akira Maeda + 885d1aecc2daae8997938994ed7999fa6c11c801 + + + + + Akira Maeda + Akira Maeda + + + Christoph Pinkel + Christoph Pinkel + + 77399f7516b596a8967dd6e41d31de0e66f07846 + + + + + + + + + + Christoph Pinkel + + + + + Improving Open Data Usability through Semantics + 2016-10-18T16:45:00 + 2016-10-18T16:45:00 + + Improving Open Data Usability through Semantics + 2016-10-18T16:45:00 + 2016-10-18T16:30:00 + + 2016-10-18T16:30:00 + Sebastian Neumaier + 2016-10-18T16:45:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Josiane Xavier Parreira + + + + Josiane Xavier Parreira + b69112e39080305b0153178cb8e678adcad0f8d9 + + + + + + + + Josiane Xavier Parreira + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Efficient Algorithms for Association Finding and Frequent Association Pattern Mining + + Efficient Algorithms for Association Finding and Frequent Association Pattern Mining + canonical code + + graph search + + + frequent association pattern mining + + + + + canonical code + distance oracle + + association finding + + frequent association pattern mining + Efficient Algorithms for Association Finding and Frequent Association Pattern Mining + + Finding associations between entities is a common information need in many areas. It has been facilitated by the increasing amount of graph-structured data on the Web describing relations between entities. 
In this paper, we define an association connecting multiple entities in a graph as a minimal connected subgraph containing all of them. We propose an efficient graph search algorithm for finding associations, which prunes the search space by exploiting distances between entities computed based on a distance oracle. Having found a possibly large group of associations, we propose to mine frequent association patterns as a conceptual abstract summarizing notable subgroups to be explored, and present an efficient mining algorithm based on canonical codes and partitions. Extensive experiments on large, real RDF datasets demonstrate the efficiency of the proposed algorithms. + distance oracle + Finding associations between entities is a common information need in many areas. It has been facilitated by the increasing amount of graph-structured data on the Web describing relations between entities. In this paper, we define an association connecting multiple entities in a graph as a minimal connected subgraph containing all of them. We propose an efficient graph search algorithm for finding associations, which prunes the search space by exploiting distances between entities computed based on a distance oracle. Having found a possibly large group of associations, we propose to mine frequent association patterns as a conceptual abstract summarizing notable subgroups to be explored, and present an efficient mining algorithm based on canonical codes and partitions. Extensive experiments on large, real RDF datasets demonstrate the efficiency of the proposed algorithms. 
+ + association finding + + + graph search + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T09:30:00 + 2016-10-19T10:30:00 + 2016-10-19T10:30:00 + 2016-10-19T10:30:00 + 2016-10-19T09:30:00 + Keynote: Kathleen McKeown + Keynote: Kathleen McKeown + 2016-10-19T10:30:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ATOS + + ATOS + ATOS + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Manel Achichi + Manel Achichi + + + + + + + + + Manel Achichi + + + + 1cc5d51633bd12fad9caacc78bcf32ae8a2fb134 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pankesh Patel + + + Pankesh Patel + + c1c532c21238edc831e152818f39ea5e497e746d + + Pankesh Patel + + + + + + + + + + + 2016-10-21T15:30:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T16:50:00 + 2016-10-21T15:30:00 + Special Event Session + Special Event Session + + + + + + + + + + + + + + + + + + + + + + + + Université catholique de Louvain + Université catholique de Louvain + + + + + Université catholique de Louvain + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Technische Universität Dresden + + + + + + + Technische Universität Dresden + Technische Universität Dresden + + + + + + + + + + + + Laura M. Daniele + + Laura M. Daniele + af5845f37a1867a8bc788e4f539dddcd5dd2b33e + + + + + + Laura M. 
Daniele + + + + Leibniz Universität Hannover + Leibniz Universität Hannover + + + + Leibniz Universität Hannover + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Fabian Flöck + + + + + Fabian Flöck + + Fabian Flöck + + + + + + + + b805260db1b72fb6954433eec4ef2c406de2dac2 + + + + + Taisuke Kimura + + + Taisuke Kimura + + + Taisuke Kimura + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Politecnico di Bari + + Politecnico di Bari + + Politecnico di bari + + Politecnico di bari + + Politecnico di Bari + + + + Politecnico di bari + + + + + + + + + + + + + + + + + + + + + + + + + + This paper presents LIXR, a system for converting between RDF and XML. LIXR is based on domain-specific language embedded into the Scala programming language. It supports the definition of transformations of datasets from RDF to XML in a declarative fashion, while still maintaining the flexibility of a full programming language environment. We directly compare this system to other systems programmed in Java and XSLT and show that the LIXR implementations are significantly shorter in terms of lines of code, in addition to being bidirectional and conceptually simple to understand. + + RDF + RDF + LIXR: Quick, succinct conversion of XML to RDF + + LIXR: Quick, succinct conversion of XML to RDF + + Scala + XML + + + format conversion + This paper presents LIXR, a system for converting between RDF and XML. LIXR is based on domain-specific language embedded into the Scala programming language. It supports the definition of transformations of datasets from RDF to XML in a declarative fashion, while still maintaining the flexibility of a full programming language environment. 
We directly compare this system to other systems programmed in Java and XSLT and show that the LIXR implementations are significantly shorter in terms of lines of code, in addition to being bidirectional and conceptually simple to understand. + + Scala + LIXR: Quick, succinct conversion of XML to RDF + format conversion + + + + XML + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 556254b522c8fa9cf8561376c21139d7c5894287 + Neil Wilson + Neil Wilson + + + + + Neil Wilson + + + + + + + + + + + + The recently proposed Triple Pattern Fragment (TPF) interface aims at increasing the availability of Web-queryable RDF datasets by trading off an increased client-side query processing effort for a significant reduction of server load. However, an additional aspect of this trade-off is a very high network load. To mitigate this drawback we propose to extend the interface by allowing clients to augment TPF requests with a VALUES clause as introduced in SPARQL 1.1. In an ongoing research project we study the trade-offs of such an extended TPF interface and compare it to the pure TPF interface. With a poster in the conference we aim to present initial results of this research. In particular, we would like to present a series of experiments showing that a distributed, bind-join-based query execution using this extended interface can reduce the network load drastically (in terms of both the number of HTTP requests and data transfer). + Linked Data Fragments + Reducing the Network Load of Triple Pattern Fragments by Supporting Bind Joins + Query Endpoints + + + The recently proposed Triple Pattern Fragment (TPF) interface aims at increasing the availability of Web-queryable RDF datasets by trading off an increased client-side query processing effort for a significant reduction of server load. However, an additional aspect of this trade-off is a very high network load. 
To mitigate this drawback we propose to extend the interface by allowing clients to augment TPF requests with a VALUES clause as introduced in SPARQL 1.1. In an ongoing research project we study the trade-offs of such an extended TPF interface and compare it to the pure TPF interface. With a poster in the conference we aim to present initial results of this research. In particular, we would like to present a series of experiments showing that a distributed, bind-join-based query execution using this extended interface can reduce the network load drastically (in terms of both the number of HTTP requests and data transfer). + + + Query Endpoints + + Reducing the Network Load of Triple Pattern Fragments by Supporting Bind Joins + + + Query Processing + Triple Pattern Fragments + + + Triple Pattern Fragments + Linked Data Fragments + + + + + Query Processing + + Reducing the Network Load of Triple Pattern Fragments by Supporting Bind Joins + + + + + + + + + + + + + + Algebraic calculi for weighted ontology alignments + + + relaxed taxonomic relations + + + + Alignments between ontologies usually come with numerical attributes expressing the confidence of each correspondence. Semantics supporting such confidences must generalise the semantics of alignments without confidence. There exists a semantics which satisfies this but introduces a discontinuity between weighted and non-weighted interpretations. Moreover, it does not provide a calculus for reasoning with weighted ontology alignments. This paper introduces a calculus for such alignments. It is given by an infinite relation-type algebra, the elements of which are weighted taxonomic relations. In addition, it approximates the non-weighted case in a continuous manner. 
+ + + Algebraic calculi for weighted ontology alignments + + + + relation algebra + algebraic calculus + algebraic calculus + relaxed taxonomic relations + + + weighted ontology alignment + + weighted ontology alignment + + Algebraic calculi for weighted ontology alignments + relation algebra + + + Alignments between ontologies usually come with numerical attributes expressing the confidence of each correspondence. Semantics supporting such confidences must generalise the semantics of alignments without confidence. There exists a semantics which satisfies this but introduces a discontinuity between weighted and non-weighted interpretations. Moreover, it does not provide a calculus for reasoning with weighted ontology alignments. This paper introduces a calculus for such alignments. It is given by an infinite relation-type algebra, the elements of which are weighted taxonomic relations. In addition, it approximates the non-weighted case in a continuous manner. + + + + + + + + + + + + + + + + + + + 2016-10-19T14:00:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + 2016-10-19T15:20:00 + Smart Planet + 2016-10-19T14:00:00 + 2016-10-19T15:20:00 + Smart Planet + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T10:30:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + Linked Data + 2016-10-20T11:50:00 + Linked Data + 2016-10-20T10:30:00 + + + Paolo Ciancarini + + + + + Paolo Ciancarini + + + + + Paolo Ciancarini + + + 769c68be495857d926488ef699ee50b2e7c762b0 + + + + + + + + + + + + + + + Information retrieval using Semantic Web Technology + Information retrieval using Semantic Web Technology + + + Artificial intelligence technique for the Semantic Web + Semantics in the knowledge bases (e.g. Linked Open Data) + Semantics spread in large-scale knowledge bases can be used to intermediate heterogeneous users’ activity logs distributed in services; it can improve applications that assist users to decide next activities across services. 
Since user activities can be represented in terms of re- lationships involving three or more things (e.g. a user tags movie items on a webpage), they can be represented as a tensor. The recent semantic sensitive tensor factorization (SSTF) is promising since it achieves high accuracies in predicting users’ activities by applying semantics behind objects (e.g. item categories) to tensor factorization. However, SSTF fo- cuses on the factorization of data logs from a single service and thus has two problems: (1) the balance problem caused when simultaneously han- dling heterogeneous datasets and (2) the sparcity problem caused when there are insufficient data logs within a single service. Our solution, Se- mantic Sensitive Simultaneous Tensor Factorization (S3TF), tackles the above problems by: (1) It creates tensors for individual services and fac- torizes those tensors simultaneously; it does not force to create a tensor from multiple services and factorize the single tensor. This avoids low prediction results caused by the balance problem. (2) It utilizes shared semantics behind distributed logs and gives semantic biases to each ten- sor factorization. This avoids the sparsity problem by using the shared se- mantics among services. Experiments using the real-world datasets show that S3TF achieves up to 13% higher accuracy in rating predictions than the current best tensor method. It also extracts implicit relationships across services in the feature spaces by simultaneouse factorization. 
+ Prediction methods of users' activities + Recommender systems using semantics in the knowledge bases + Prediction methods of users' activities + + Semantic Sensitive Simultaneous Tensor Factorization + + Semantic Sensitive Simultaneous Tensor Factorization + Tensor Factorization using semantic knowledge + + Semantics spread in large-scale knowledge bases can be used to intermediate heterogeneous users’ activity logs distributed in services; it can improve applications that assist users to decide next activities across services. Since user activities can be represented in terms of re- lationships involving three or more things (e.g. a user tags movie items on a webpage), they can be represented as a tensor. The recent semantic sensitive tensor factorization (SSTF) is promising since it achieves high accuracies in predicting users’ activities by applying semantics behind objects (e.g. item categories) to tensor factorization. However, SSTF fo- cuses on the factorization of data logs from a single service and thus has two problems: (1) the balance problem caused when simultaneously han- dling heterogeneous datasets and (2) the sparcity problem caused when there are insufficient data logs within a single service. Our solution, Se- mantic Sensitive Simultaneous Tensor Factorization (S3TF), tackles the above problems by: (1) It creates tensors for individual services and fac- torizes those tensors simultaneously; it does not force to create a tensor from multiple services and factorize the single tensor. This avoids low prediction results caused by the balance problem. (2) It utilizes shared semantics behind distributed logs and gives semantic biases to each ten- sor factorization. This avoids the sparsity problem by using the shared se- mantics among services. Experiments using the real-world datasets show that S3TF achieves up to 13% higher accuracy in rating predictions than the current best tensor method. 
It also extracts implicit relationships across services in the feature spaces by simultaneouse factorization. + Recommender systems using semantics in the knowledge bases + Semantics in the knowledge bases (e.g. Linked Open Data) + + + + Tensor Factorization using semantic knowledge + Semantic Sensitive Simultaneous Tensor Factorization + Artificial intelligence technique for the Semantic Web + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Wouter Beek + fc25e939e3bd6e57bbfa027d6546f3116e632492 + + + Wouter Beek + + + + + + Wouter Beek + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc1d47b43c3d22ef49202d417a7cf655057bcfd7 + + + Evgeny Kharlamov + + + + Evgeny Kharlamov + + + + + + + Evgeny Kharlamov + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T11:00:00 + 2016-10-18T11:00:00 + + + 2016-10-18T11:15:00 + Entity-Relationship Extraction from Wikipedia Unstructured Text + Entity-Relationship Extraction from Wikipedia Unstructured Text + 2016-10-18T11:15:00 + 2016-10-18T11:15:00 + + Radityo Eko Prasojo + 2016-10-18T11:15:00 + + + + Ontology Engineering Group, Universidad Politécnica de Madrid + + + + + Ontology Engineering Group, Universidad Politécnica de Madrid + Ontology Engineering Group, Universidad Politécnica de Madrid + + + + + + + + + INSA of Toulouse, LAAS-CNRS + + INSA of Toulouse, LAAS-CNRS + + + + + INSA of Toulouse, LAAS-CNRS + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Laurens De Graeve + + + + Laurens De Graeve + + + Laurens De Graeve + + e2c5d68771e49a539531943d0354069684316f0f + + + + 2016-10-21T11:50:00 + + + 2016-10-21T11:50:00 + 2016-10-21T11:50:00 + + Exception-enriched Rule Learning from Knowledge Graphs + Mohamed H. 
Gad-Elrab, Daria Stepanova, Jacopo Urbani and Gerhard Weikum + 2016-10-21T11:50:00 + 2016-10-21T11:30:00 + 2016-10-21T11:30:00 + Exception-enriched Rule Learning from Knowledge Graphs + + + + + + + + + + d7dd4d1804ac29dc9ee2788b83bf23362939763e + + + + + + + Shinichiro Tago + + + + Shinichiro Tago + Shinichiro Tago + + + + + + + + + + + + + + + + + + + + + + + + UEC, The University of Electro-Communications + + + UEC, The University of Electro-Communications + + + UEC, The University of Electro-Communications + + + + + + + + + + Eric Prud'Hommeaux + Eric Prud'Hommeaux + Eric Prud'Hommeaux + + + + + + + + e2d67791b2a0ce3441c0c770f94daa130b4e6d95 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Ulm + + + + + + + + + University of Ulm + + University of Ulm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Christophe Gravier + + Christophe Gravier + + + + Christophe Gravier + + + + + + + + af583d240b4536b3ead4ec62a1b46a8e5456e659 + + + + LT2C Claude Chappe + + + + LT2C Claude Chappe + LT2C Claude Chappe + + + + + + + + + + + + Tianjin University + + + + + + Tianjin University + + + Tianjin University + + + + + + + Robert Piro + + + + + + Robert Piro + a7cda19d058934ea762bdfb6b875610ec4dbaed9 + + + + Robert Piro + + + + + + + + + + + + + + + SWoTSuite: A Toolkit for Prototyping Cross-domain Semantic Web of Things Applications + + 2016-10-19T21:00:00 + SWoTSuite: A Toolkit for Prototyping Cross-domain Semantic Web of Things Applications + + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Pankesh Patel, Amelie Gyrard, Dhavalkumar Thakker, Amit Sheth and Martin Serrano + + + + + + + + + + 2016-10-20T14:10:00 + Operator-aware approach for boosting performance in RDF stream processing + Operator-aware approach for boosting performance in RDF stream processing + 2016-10-20T14:30:00 + 2016-10-20T14:30:00 + 2016-10-20T14:30:00 + 
+ + 2016-10-20T14:30:00 + 2016-10-20T14:10:00 + Danh Le-Phuoc + + + + Universität zu Lübeck, Institut für Informations Systeme + + + Universität zu Lübeck, Institut für Informations Systeme + + + + + Universität zu Lübeck, Institut für Informations Systeme + + + + + + + + + + + + + + + + + + + + + + + LIRMM + + LIRMM + LIRMM + + + + + + + + + + + + SPARQLGX in Action: Efficient Distributed Evaluation of SPARQL with Apache Spark + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Damien Graux, Louis Jachiet, Pierre Geneves and Nabil Layaida + SPARQLGX in Action: Efficient Distributed Evaluation of SPARQL with Apache Spark + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + + + + 2016-10-20T16:10:00 + 2016-10-20T15:50:00 + 2016-10-20T16:10:00 + + + 2016-10-20T15:50:00 + Towards Analytics Aware Ontology Based Access to Static and Streaming Data + Evgeny Kharlamov, Yannis Kotidis, Theofilos Mailis, Christian Neuenstadt, Charalampos Nikolaou, Özgür Lütfü Özcep, Christoforos Svingos, Dmitriy Zheleznyakov, Steffen Lamparter, Ian Horrocks, Yannis Ioannidis and Ralf Möller + Towards Analytics Aware Ontology Based Access to Static and Streaming Data + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + + + 2016-10-20T14:30:00 + 2016-10-20T14:50:00 + + Andrea Mauri, Jean-Paul Calbimonte, Daniele Dell'Aglio, Marco Balduini, Marco Brambilla, Emanuele Della Valle and Karl Aberer + + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 2016-10-20T14:30:00 + + TripleWave: Spreading RDF Streams on the Web + 2016-10-20T14:50:00 + TripleWave: Spreading RDF Streams on the Web + + + + + + + + + + + Eirik Bakke + Eirik Bakke + + Eirik Bakke + + + + + + + + + + + + + + + + + Thanassis Tiropanis + + + + + + + + 9241d288757ff5c800606b716ae9c96f87c66493 + Thanassis Tiropanis + + + + + Thanassis Tiropanis + + + + + + + + + + + + CNRS + CNRS + CNRS + + + + + + + + + + + + + + + + + + + + + 183ab288609caaa35e87d388df4443c568eb103f + + + + + Khai Nguyen + Khai Nguyen + + + 
+ + Khai Nguyen + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kno.e.sis Center, Wright State University + Kno.e.sis Center, Wright State University + + Kno.e.sis Center, Wright State University + + + + + + + + + + + 2016-10-19T21:00:00 + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Gize: A Time Warp in the Web of Data + 2016-10-19T18:00:00 + Valeria Fionda, Melisachew Wudage Chekol and Giuseppe Pirrò + Gize: A Time Warp in the Web of Data + + + + + + + + + + + + + + + + + + + + + David Purcell + David Purcell + efc7e1ff9cf1a36a560b85f1ec91e16a7139a731 + + + + + + David Purcell + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T12:00:00 + 2016-10-19T12:20:00 + Giuseppe Loseto, Saverio Ieva, Filippo Gramegna, Michele Ruta, Floriano Scioscia and Eugenio Di Sciascio + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + + 2016-10-19T12:00:00 + Linked Data (in resourceless) Platforms: a mapping for Constrained Application Protocol + Linked Data (in resourceless) Platforms: a mapping for Constrained Application Protocol + + + 2016-10-19T12:20:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Yasar Khan + + + + + + + + + Yasar Khan + + + Yasar Khan + + + On the Role of Semantics for Detecting pro-ISIS Stances on Social Media + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Hassan Saif, Miriam Fernandez, Matthew Rowe and Harith Alani + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + On the Role of Semantics for Detecting pro-ISIS Stances on Social Media + + + + Valeria Fionda + + f29ec74ebe2918cd0732a899e1949c503a144647 + + + + + + + + Valeria Fionda + + Valeria Fionda + + + + + Yuting Song + Yuting Song + + 3804e24b42c3292daac88d05a49accd8010b5f44 + + + + + Yuting Song + + + + + + + + 2016-10-19T21:00:00 + DBpedia Entity Type Inference Using Categories + 2016-10-19T18:00:00 + Lu Fang, Qingliang Miao and 
Yao Meng + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + DBpedia Entity Type Inference Using Categories + + + + + + Michel Buffa + + + + + Michel Buffa + + + + + + Michel Buffa + + + d50a9bdc107cdf7b3e32a131655b4d7f577a33c6 + + + + 8a8535067464904d74264b02f2e2dce905730d37 + + + + + + + Junzhao Zhang + Junzhao Zhang + + + + Junzhao Zhang + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Stuttgart Media University + + + + Stuttgart Media University + + + + + Stuttgart Media University + + + + 2016-10-19T21:00:00 + + Who-Does-What: A knowledge base of people's occupations and job activities + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Who-Does-What: A knowledge base of people's occupations and job activities + 2016-10-19T18:00:00 + Jonas Bulegon Gassen, Stefano Faralli, Simone Paolo Ponzetto and Jan Mendling + + + + + + + + In this demo we present the SOMM system that resulted from an ongoing collaboration between Siemens and the University of Oxford. The goal of this collaboration is to facilitate design and management of ontologies that capture conceptual information models underpinning various industrial applications. SOMM supports engineers with little background on semantic technologies in the creation of such ontologies and in populating them with data. SOMM implements a fragment of OWL 2 RL extended with a form of integrity constraints for data validation, and it comes with support for schema and data reasoning, as well as for ontology integration. We demonstrate functionality of SOMM on two scenarios from energy and manufacturing domains. + SOMM: Industry Oriented Ontology Management Tool + + reasoning + In this demo we present the SOMM system that resulted from an ongoing collaboration between Siemens and the University of Oxford. 
The goal of this collaboration is to facilitate design and management of ontologies that capture conceptual information models underpinning various industrial applications. SOMM supports engineers with little background on semantic technologies in the creation of such ontologies and in populating them with data. SOMM implements a fragment of OWL 2 RL extended with a form of integrity constraints for data validation, and it comes with support for schema and data reasoning, as well as for ontology integration. We demonstrate functionality of SOMM on two scenarios from energy and manufacturing domains. + + + + + constraints + + + + + industry + SOMM: Industry Oriented Ontology Management Tool + + reasoning + + + + ontologies + + + + information models + + + + + + + information models + + + constraints + industry + ontologies + SOMM: Industry Oriented Ontology Management Tool + + + + + + + + + + + + + + + + + + Wei Hu + + + + + + Wei Hu + + + + + Wei Hu + f7074d05b74deb43ec150671bb9b226578d20f2b + + + + 2016-10-20T15:50:00 + 2016-10-20T15:50:00 + 2016-10-20T15:50:00 + + + Capturing Industrial Information Models with Ontologies and Constraints + + Capturing Industrial Information Models with Ontologies and Constraints + Evgeny Kharlamov, Bernardo Cuenca Grau, Ernesto Jimenez-Ruiz, Steffen Lamparter, Gulnar Mehdi, Martin Ringsquandl, Yavor Nenov, Sebastian Brandt and Ian Horrocks + 2016-10-20T15:30:00 + 2016-10-20T15:30:00 + 2016-10-20T15:50:00 + + + + + + + + + + + + + + + + 2016-10-21T13:30:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + Ontologies (II) + 2016-10-21T13:30:00 + Ontologies (II) + + + 2016-10-20T11:30:00 + A Fine-Grained Evaluation of SPARQL Endpoint Federation Systems + Muhammad Saleem, Yasar Khan, Ali Hasnain, Ivan Ermilov and Axel-Cyrille Ngonga Ngomo + 2016-10-20T11:50:00 + 2016-10-20T11:50:00 + + 2016-10-20T11:30:00 + A Fine-Grained Evaluation of SPARQL Endpoint Federation Systems + 2016-10-20T11:50:00 + + 
+ 2016-10-20T11:50:00 + + + + + Science & Technology Research Laboratories, Japan Broadcasting Corporation(NHK) + + + + + + + Science & Technology Research Laboratories, Japan Broadcasting Corporation(NHK) + Science & Technology Research Laboratories, Japan Broadcasting Corporation(NHK) + + + + + + + + + + + + + + + + + + + + + + + + + Amith P. Sheth + Amith P. Sheth + + + Amith P. Sheth + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + Changlong Wang, Xiaowang Zhang and Zhiyong Feng + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Structure-guiding Modular Reasoning for Expressive Ontologies + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Structure-guiding Modular Reasoning for Expressive Ontologies + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + Meghyn Bienvenu + Meghyn Bienvenu + + + + + + + + + + 7553a4556916b7c26b0955d745eec61f9aea5be5 + + + Meghyn Bienvenu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pontificia Universidad Catolica de Chile + + Pontificia Universidad Catolica de Chile + + + + Pontificia Universidad Catolica de Chile + Pontificia Universidad Católica de Chile + Pontificia Universidad Católica de Chile + + + + Pontificia Universidad Católica de Chile + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + David Martin and Peter Patel-Schneider + 2016-10-19T18:00:00 + + EXISTStential Aspects of SPARQL + + + EXISTStential Aspects of SPARQL + 2016-10-19T21:00:00 + + + Chetana Gavankar + e0a256306b228712f1e0f37eb0115e5f70faf13f + + Chetana Gavankar + + Chetana Gavankar + + + + + + + + + + + + + + TU Darmstadt, Germany + + + TU Darmstadt, Germany + + + + + TU Darmstadt, Germany + + + + + + + Andrea Splendiani + + + Andrea Splendiani + + + + + + Andrea Splendiani + d2b55d1cadb293d73c649c9aa834f3a15852fe91 + + + + Metadata. 
+ + + Scholarly Data + Academic publishers, such as Springer Nature, annotate scholarly products with the appropriate research topics and keywords to facilitate the marketing process and to support (digital) libraries and academic search engines. This critical process is usually handled manually by experienced editors, leading to high costs and slow throughput. In this demo paper, we present Smart Topic Miner (STM), a semantic application designed to support the Springer Nature Computer Science editorial team in classifying scholarly publications. STM analyses conference proceedings and annotates them with a set of topics drawn from a large automatically generated ontology of research areas and a set of tags from Springer Nature Classification. + Smart Topic Miner: Supporting Springer Nature Editors with Semantic Web Technologies + + + Ontology Learning + + Scholarly Data + Conference Proceedings + Metadata. + + + + Data Mining + + Bibliographic Data + + Conference Proceedings + + Ontology Learning + + Smart Topic Miner: Supporting Springer Nature Editors with Semantic Web Technologies + Smart Topic Miner: Supporting Springer Nature Editors with Semantic Web Technologies + + Bibliographic Data + Data Mining + + + Scholarly Ontologies + Academic publishers, such as Springer Nature, annotate scholarly products with the appropriate research topics and keywords to facilitate the marketing process and to support (digital) libraries and academic search engines. This critical process is usually handled manually by experienced editors, leading to high costs and slow throughput. In this demo paper, we present Smart Topic Miner (STM), a semantic application designed to support the Springer Nature Computer Science editorial team in classifying scholarly publications. STM analyses conference proceedings and annotates them with a set of topics drawn from a large automatically generated ontology of research areas and a set of tags from Springer Nature Classification. 
+ + Scholarly Ontologies + + + + + + + + + + + + + + + + Ian Harrow + + + Ian Harrow + + + + fb7c2d632e58ae40666baaabe677b8bb91edabcc + + + + Ian Harrow + + + + + + + + + + + Shuya Abe + 084acd609d8ef10c7b09810da9f23fb55d20ea61 + + + + Shuya Abe + + + Shuya Abe + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kartik Asooja + Kartik Asooja + + + + + Kartik Asooja + + + + + + Carlos Buil Aranda + + + Carlos Buil Aranda + + + + + + 250a676d59d5e43d3d41c99fa31c016132e9ee0f + + + + Carlos Buil Aranda + + + + 2016-10-20T15:30:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + Search (II) + Search (II) + 2016-10-20T16:50:00 + 2016-10-20T15:30:00 + + + + a5d5374c8d420b63cec2aca4e83499227f8a56cd + + + + + + An Jacobs + + An Jacobs + + An Jacobs + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Implementing Customer Reception Service in Robot Cafe using Stream Reasoning and ROS based on PRINTEPS + + Takeshi Morita, Yu Sugawara, Ryota Nishimura and Takahira Yamaguchi + + 2016-10-19T18:00:00 + Implementing Customer Reception Service in Robot Cafe using Stream Reasoning and ROS based on PRINTEPS + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Femke Ongenae + + + + + + c979080c0d162570600ee44eed83bfe0fe9c69b6 + + + Femke Ongenae + Femke Ongenae + + + + + + + + + + Nabil Layaida + + Nabil Layaida + + e1ab6c0bc3461716961f51ca8ff5d1fe192258b0 + + + + Nabil Layaida + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + SQuaRE: A Visual Tool For Creating R2RML Mappings + + 2016-10-19T18:00:00 + + Michał Blinkiewicz and Jaroslaw Bak + SQuaRE: A Visual Tool For Creating R2RML Mappings + + + + + Jing Mei + Jing Mei + Jing Mei + d55031f4add439f198f6968ca1bc1b916806cbff + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + In the recent years, several approaches for machine learning on the Semantic Web have been proposed. However, no extensive comparisons between those approaches have been undertaken, in particular due to a lack of publicly available, acknowledged benchmark datasets. In this paper, we present a collection of 22 benchmark datasets at different sizes, derived from existing Semantic Web datasets as well as from external classification and regression problems linked to datasets in the Linked Open Data cloud. Such a collection of datasets can be used to conduct qualitative performance testing and systematic comparisons of approaches. + A Collection of Benchmark Datasets for Systematic Evaluations of Machine Learning on the Semantic Web + Linked Open Data + Benchmarking + In the recent years, several approaches for machine learning on the Semantic Web have been proposed. However, no extensive comparisons between those approaches have been undertaken, in particular due to a lack of publicly available, acknowledged benchmark datasets. In this paper, we present a collection of 22 benchmark datasets at different sizes, derived from existing Semantic Web datasets as well as from external classification and regression problems linked to datasets in the Linked Open Data cloud. Such a collection of datasets can be used to conduct qualitative performance testing and systematic comparisons of approaches. 
+ Machine Learning + + A Collection of Benchmark Datasets for Systematic Evaluations of Machine Learning on the Semantic Web + Datasets + + Machine Learning + + Benchmarking + + + + + + A Collection of Benchmark Datasets for Systematic Evaluations of Machine Learning on the Semantic Web + Datasets + + + + + Linked Open Data + + + + Andrea Giovanni Nuzzolese + + Nuzzolese + + + + Andrea Giovanni + Andrea Giovanni Nuzzolese + + + + + 87707a356b60f036a079c5268236791fbab9f85e + + Andrea Giovanni Nuzzolese + + + + 37d41901db3bb687d255ca09084c4ada551096c2 + + + Pieter Heyvaert + + + Pieter Heyvaert + + + + + + Pieter Heyvaert + + + + + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + Ran Yu, Besnik Fetahu, Ujwal Gadiraju and Stefan Dietze + A Survey on Challenges in Web Markup Data for Entity Retrieval + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + A Survey on Challenges in Web Markup Data for Entity Retrieval + 2016-10-19T21:00:00 + + + + Predicting Energy Consumption of Ontology Reasoning over Mobile Devices + + 2016-10-19T14:20:00 + 2016-10-19T14:20:00 + 2016-10-19T14:00:00 + 2016-10-19T14:00:00 + + Isa Guclu, Yuan-Fang Li, Jeff Z. Pan and Martin J. 
Kollingbaum + 2016-10-19T14:20:00 + Predicting Energy Consumption of Ontology Reasoning over Mobile Devices + 2016-10-19T14:20:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Luca Costabello, Pierre-Yves Vandenbussche, Gofran Shukair, Corine Deliot and Neil Wilson + Access Logs Don't Lie: Towards Traffic Analytics for Linked Data Publishers + + Access Logs Don't Lie: Towards Traffic Analytics for Linked Data Publishers + + + 7c02091cc31fe5d2006aba1f9167fbd5cd28fef3 + + Ruben Taelman + Ruben Taelman + + Ruben Taelman + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + Data Integration for the Media Value Chain + + Henning Agt-Rickauer, Jörg Waitelonis, Tabea Tietz and Harald Sack + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Data Integration for the Media Value Chain + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + Aitor Soroa + Aitor Soroa + + Aitor Soroa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 92ea611cf55f95a0ffd94eca818bb9d8a3f9a735 + + + + + + Manolis Koubarakis + Manolis Koubarakis + + + Manolis Koubarakis + + + + + + + + + + + + + + + + + + 2016-10-21T13:30:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + Querying/SPARQL (II) + 2016-10-21T14:50:00 + Querying/SPARQL (II) + 2016-10-21T13:30:00 + + + + What if machines could be creative? + In this demo proposal, we present a system that proposes generations of existing concepts such as "cars that park automatically"or "skyscrapers made of glass". + + + + description logics + + concept + concept + creativity + creativity + What if machines could be creative? 
+ invention + description logics + + + + + + + + + invention + In this demo proposal, we present a system that proposes generations of existing concepts such as "cars that park automatically"or "skyscrapers made of glass". + + + What if machines could be creative? + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + Interdisciplinary Classification of Audio Effects in the Audio Effect Ontology + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + + Thomas Wilmering and Mark B. Sandler + 2016-10-19T21:00:00 + + Interdisciplinary Classification of Audio Effects in the Audio Effect Ontology + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + XB: A Large-scale Korean Knowledge Base for Question Answering Systems + + XB: A Large-scale Korean Knowledge Base for Question Answering Systems + + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Jongmin Lee, Youngkyoung Ham and Tony Lee + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + f7c78787302ad536b4811448194b9f8b47bfcf86 + + Manuel Atencia + + Manuel Atencia + + + + + + Manuel Atencia + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ryutaro Ichise + Ryutaro Ichise + 0a09da4b327971fff4a6573caac2b4872cab5cc9 + + + + + + + + + Ryutaro Ichise + + + + + + + + + Ahmet Soylu + + + + + + Ahmet Soylu + bf7b4e41f5b18d0964cb5901e4a74a08d6f2b10b + + Ahmet Soylu + + + + Christian Bizer + Christian Bizer + + Christian Bizer + + + + + + + + + d293ced5ef76989393dc5a8380fb9b2c89c1f083 + + + + + + + + + + + + + + Personalized robot interactions to intercept behavioral disturbances of people with dementia + 2016-10-19T18:00:00 + Femke Ongenae, Femke De Backere, Jelle Nelis, Stijn De Pestel, Christof Mahieu, Shirley Elprama, Charlotte Jewell, An Jacobs, Pieter Simoens and Filip De Turck + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + 2016-10-19T21:00:00 + Personalized 
robot interactions to intercept behavioral disturbances of people with dementia + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vitaveska Lanfranchi + + + + + + Vitaveska Lanfranchi + + + + + + + Vitaveska Lanfranchi + + + + + + + + + + + + + + + + 2016-10-20T11:50:00 + 2016-10-20T13:30:00 + 2016-10-20T13:30:00 + 2016-10-20T13:30:00 + 2016-10-20T13:30:00 + Lunch + 2016-10-20T11:50:00 + Lunch + + + + + + + Semantic Web data management raises the challenge of answering queries under constraints (i.e., in the presence of implicit data). To bridge the gap between this extended setting and that of query evaluation provided by database engines, a reasoning step (w.r.t. the constraints) is necessary before query evaluation. A large and useful set of ontology languages enjoys FOL reducibility of query answering: queries can be answered by evaluating a SQLized first-order logic (FOL) formula (obtained from the query and the ontology) directly against the explicitly stored data (i.e., without considering the ontological constraints). +Our demonstration showcases to the attendees, and analyzes, the performance of several reformulation-based query answering techniques, including one we recently devised, applied to the lightweight description logic DL-LiteR underpinning the W3C’s OWL2 QL profile. + Semantic Web data management raises the challenge of answering queries under constraints (i.e., in the presence of implicit data). To bridge the gap between this extended setting and that of query evaluation provided by database engines, a reasoning step (w.r.t. the constraints) is necessary before query evaluation. 
A large and useful set of ontology languages enjoys FOL reducibility of query answering: queries can be answered by evaluating a SQLized first-order logic (FOL) formula (obtained from the query and the ontology) directly against the explicitly stored data (i.e., without considering the ontological constraints). +Our demonstration showcases to the attendees, and analyzes, the performance of several reformulation-based query answering techniques, including one we recently devised, applied to the lightweight description logic DL-LiteR underpinning the W3C’s OWL2 QL profile. + Query optimization + Optimizing FOL reducible query answering: understanding performance challenges + + Query answering + + DL-Lite + + + Optimizing FOL reducible query answering: understanding performance challenges + + Query optimization + Query answering + + + Optimizing FOL reducible query answering: understanding performance challenges + FOL query reformulation + FOL query reformulation + + DL-Lite + + + + + + + + + + + + + + + + + + + + + + + + + Stefan Schlobach + a9f51c1c74b52bff440f635430f761876fa8b83b + + + + Stefan Schlobach + + + Stefan Schlobach + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Lowering knowledge : Making constrained devices semantically interoperable + + + + 2016-10-19T21:00:00 + Lowering knowledge : Making constrained devices semantically interoperable + Nicolas Seydoux, Khalil Drira, Nathalie Hernandez and Thierry Monteil + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Simone Paolo Ponzetto + + + Simone Paolo Ponzetto + Simone Paolo Ponzetto + 8fb5b6bef5aaeb67a43acb4ea8924f144a03d1f8 + + + + + + + + + + + + + + Suresh Alse + + + + + Suresh Alse + + + Suresh Alse + 
22c36eea38557c8acf092a86cf1f130734f7a822 + + + + + + + + + + + + + + + + + + + + + + + + + + Victor Felder + + + + + + + + + + Victor Felder + Victor Felder + b343d8eb73f38f07f8c0dadbe1c867f8f4dcdd81 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Knowledge Media Institute + Knowledge Media Institute + + + + + Knowledge Media Institute + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + linked data + linked data + Yuzu: Publishing Any Data as Linked Data + + + + data conversion + + + Yuzu: Publishing Any Data as Linked Data + data frontend + Yuzu: Publishing Any Data as Linked Data + Linked data is one of the most important methods for improving the applicability of data, however most data is not in linked data formats and raising it to linked data is still a significant challenge. We present Yuzu, an application that makes it easy to host legacy data in JSON, XML or CSV as linked data, while providing a clean interface with advanced features. The ease-of-use of this framework is shown by its adoption for a number of existing datasets including WordNet. + + data frontend + data conversion + + Linked data is one of the most important methods for improving the applicability of data, however most data is not in linked data formats and raising it to linked data is still a significant challenge. We present Yuzu, an application that makes it easy to host legacy data in JSON, XML or CSV as linked data, while providing a clean interface with advanced features. The ease-of-use of this framework is shown by its adoption for a number of existing datasets including WordNet. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ermyas Abebe + + a4132a2b9ab88c52ef4412fcd79d24e2fb4a0efe + + Ermyas Abebe + + + Ermyas Abebe + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vlad Posea + + Vlad Posea + + + + Vlad Posea + + + + + + + 10e096a0c0842fea011fb61ff468f90deb161a7c + + + + + + + + + + + + + + + + + + Data Mining + + + + + + RDF2Vec: RDF Graph Embeddings for Data Mining + Graph Embeddings + Linked Open Data + Graph Embeddings + + + Linked Open Data has been recognized as a valuable source for background information in data mining. However, most data mining tools require features in propositional form, i.e., a vector of nominal or numerical features associated with an instance, while Linked Open Data sources are graphs by nature. In this paper, we present RDF2Vec, an approach that uses language modeling approaches for unsupervised feature extraction from sequences of words, and adapts them to RDF graphs. We generate sequences by leveraging local information from graph sub-structures, harvested by Weisfeiler-Lehman Subtree RDF Graph Kernels and graph walks, and learn latent numerical representations of entities in RDF graphs. Our evaluation shows that such vector representations outperform existing techniques for the propositionalization of RDF graphs on a variety of different predictive machine learning tasks, and that feature vector representations of general knowledge graphs such as DBpedia and Wikidata can be easily reused for different tasks. + RDF2Vec: RDF Graph Embeddings for Data Mining + + + Data Mining + RDF2Vec: RDF Graph Embeddings for Data Mining + Linked Open Data + Linked Open Data has been recognized as a valuable source for background information in data mining. 
However, most data mining tools require features in propositional form, i.e., a vector of nominal or numerical features associated with an instance, while Linked Open Data sources are graphs by nature. In this paper, we present RDF2Vec, an approach that uses language modeling approaches for unsupervised feature extraction from sequences of words, and adapts them to RDF graphs. We generate sequences by leveraging local information from graph sub-structures, harvested by Weisfeiler-Lehman Subtree RDF Graph Kernels and graph walks, and learn latent numerical representations of entities in RDF graphs. Our evaluation shows that such vector representations outperform existing techniques for the propositionalization of RDF graphs on a variety of different predictive machine learning tasks, and that feature vector representations of general knowledge graphs such as DBpedia and Wikidata can be easily reused for different tasks. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Université de Fribourg + Université de Fribourg + + Université de Fribourg + + + + + + + + + + + + + + + + + Accenture Technology Labs / INRIA + + + + + + + Accenture Technology Labs / INRIA + + Accenture Technology Labs / INRIA + + + + + + + + + John Vard + + + + 05ea0b74ea3b81b7481a0913163891d3cb6b3787 + + John Vard + + + + John Vard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The time of answering a SPARQL query with its all exact solutions in large scale RDF dataset possibly exceeds users' tolerable waiting time, especially when it contains the OPT operations since the OPT operation is the least conventional operator in SPARQL. +It becomes essential to make a trade-off between the query response time and solution accuracy. 
We propose PRONA - an plugin for well-designed approximate queries in Jena, which provides help for users to answer well-designed SPARQL queries by approximate computation.The main features of PRONA comprise SPARQL query engine with approximate queries, as well as various approximate degrees for users to choose. + + SPARQL + + Approximate queries + Well-designed patterns + RDF + + The time of answering a SPARQL query with its all exact solutions in large scale RDF dataset possibly exceeds users' tolerable waiting time, especially when it contains the OPT operations since the OPT operation is the least conventional operator in SPARQL. +It becomes essential to make a trade-off between the query response time and solution accuracy. We propose PRONA - an plugin for well-designed approximate queries in Jena, which provides help for users to answer well-designed SPARQL queries by approximate computation.The main features of PRONA comprise SPARQL query engine with approximate queries, as well as various approximate degrees for users to choose. + + + + + + PRONA: A Plugin for Well-Designed Approximate Queries in Jena + + + PRONA: A Plugin for Well-Designed Approximate Queries in Jena + + + Well-designed patterns + + PRONA: A Plugin for Well-Designed Approximate Queries in Jena + + RDF + + + Approximate queries + SPARQL + + + + + Extending SPARQL for data analytic tasks + + data analytics + + data integration + + + + + SPARQL has many nice features for accessing data integrated across different data sources, which is an important step in any data analysis task. We report the use of SPARQL for two real data analytic use cases from the healthcare and life sciences domains, which exposed certain weaknesses in the current specification of SPARQL; specifically when the data being integrated is most conveniently accessed via RESTful services and in formats beyond RDF, such as XML. 
We therefore extended SPARQL with generalized 'service', constructs for accessing services beyond the SPARQL endpoints supported by 'service'; for efficiency, our constructs additionally needed to support posting data, which is also not supported by 'service'. Furthermore, data from multiple sources led to natural modularity in the queries, with different portions of the query pertaining to different sources, so we also extended SPARQL with a simple 'function' mechanism to isolate the mechanics of accessing each endpoint. We provide an open source implementation of this SPARQL endpoint in an RDF store called Quetzal, and evaluate its use in the two data analytic scenarios over real datasets. + + data integration + data analytics + SPARQL has many nice features for accessing data integrated across different data sources, which is an important step in any data analysis task. We report the use of SPARQL for two real data analytic use cases from the healthcare and life sciences domains, which exposed certain weaknesses in the current specification of SPARQL; specifically when the data being integrated is most conveniently accessed via RESTful services and in formats beyond RDF, such as XML. We therefore extended SPARQL with generalized 'service', constructs for accessing services beyond the SPARQL endpoints supported by 'service'; for efficiency, our constructs additionally needed to support posting data, which is also not supported by 'service'. Furthermore, data from multiple sources led to natural modularity in the queries, with different portions of the query pertaining to different sources, so we also extended SPARQL with a simple 'function' mechanism to isolate the mechanics of accessing each endpoint. We provide an open source implementation of this SPARQL endpoint in an RDF store called Quetzal, and evaluate its use in the two data analytic scenarios over real datasets. 
+ + + + Extending SPARQL for data analytic tasks + + + + + SPARQL + + + + + Extending SPARQL for data analytic tasks + + SPARQL + + + + + + + + + + + + + + + + + + + + + + + + + + + Query Translation + Internet of Things + + Query Translation + SQL + Internet of Things + Analytics + + PIOTRe: Personal Internet of Things Repository + + SPARQL + Web Observatory + + + PIOTRe: Personal Internet of Things Repository + Analytics + + + + RSP + RSP + PIOTRe: Personal Internet of Things Repository + Resource-constrained Internet of Things (IoT) devices like Raspberry Pis', with specific performance optimisation, can serve as interoperable personal Linked Data repositories for IoT applications. In this demo paper we describe PIOTRe, a personal datastore that utilises our sparql2sql query translation technology on Pis' to process, store and publish IoT time-series historical data and streams. We demonstrate, for a smart home scenario with PIOTRe: a real-time dashboard that utilises RDF stream processing, a set of descriptive analytics visualisations on historical data, a framework for registering stream queries within a local network and a means of sharing metadata globally with HyperCat and Web Observatories. + SQL + + + + + Web Observatory + + + + Resource-constrained Internet of Things (IoT) devices like Raspberry Pis', with specific performance optimisation, can serve as interoperable personal Linked Data repositories for IoT applications. In this demo paper we describe PIOTRe, a personal datastore that utilises our sparql2sql query translation technology on Pis' to process, store and publish IoT time-series historical data and streams. We demonstrate, for a smart home scenario with PIOTRe: a real-time dashboard that utilises RDF stream processing, a set of descriptive analytics visualisations on historical data, a framework for registering stream queries within a local network and a means of sharing metadata globally with HyperCat and Web Observatories. 
+ SPARQL + + + Automatic Classification of Springer Nature Proceedings with Smart Topic Miner + 2016-10-21T11:30:00 + 2016-10-21T11:10:00 + Francesco Osborne, Angelo Salatino, Aliaksandr Birukou and Enrico Motta + + 2016-10-21T11:30:00 + + 2016-10-21T11:30:00 + + 2016-10-21T11:30:00 + 2016-10-21T11:10:00 + Automatic Classification of Springer Nature Proceedings with Smart Topic Miner + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T09:00:00 + 2016-10-20T10:00:00 + 2016-10-20T10:00:00 + 2016-10-20T10:00:00 + 2016-10-20T10:00:00 + 2016-10-20T09:00:00 + Keynote: Christian Bizer + Keynote: Christian Bizer + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Université Joseph Fourier + Université Joseph Fourier + + + + + + + + Université Joseph Fourier + + + + + + + + + + + + + + + + + + + + + + + + + + 421ac6bc9506d2f2e1d6640498b35d6c6231a13c + + Heiko Maus + + + + + + + + + Heiko Maus + Heiko Maus + + + + + + + + + + + + + + + 6dc74861de4f6d7be14507d7925d2fcf4a9d3d62 + Ichiro Yamada + + + + + + + + + + + Ichiro Yamada + + Ichiro Yamada + + + + + + + + + Fumihito Nishino + + + fd1f1d905060f01fccf69d4bd0f37762449ecb1b + + Fumihito Nishino + + + + + + + Fumihito Nishino + + + + + + + + + cb0a1b9eb51b03495a7c0ee59829c06d201de120 + Thierry Monteil + + Thierry Monteil + + + + + + + Thierry Monteil + + + Constructing Curriculum Ontology and Dynamic Learning Path Based on Resource Description Framework + + + learning path + Curriculum for school is generated based on the academic year. For the reason that students need to learn many subjects every year, the relative topics are put into curricula in discrete. In this study, we propose a method to construct a dynamic learning path which enables us to learn the relative topics continuously. In this process, we define two kinds of similarity score, inheritance score and context similarity score to connect the learning path of relative topics. 
We also construct curriculum ontology with Resource Description Framework (RDF) to make the dynamic learning path accessible. Using the curriculum ontology, we develop a learning system for school which shows a dynamic learning path with broadcasted video clips. + education + + ontology + + + education + curriculum + resource description framework + natural language processing + + linked data + + + + + knowledge graph + natural language processing + + + Constructing Curriculum Ontology and Dynamic Learning Path Based on Resource Description Framework + resource description framework + curriculum + linked data + ontology + + + Curriculum for school is generated based on the academic year. For the reason that students need to learn many subjects every year, the relative topics are put into curricula in discrete. In this study, we propose a method to construct a dynamic learning path which enables us to learn the relative topics continuously. In this process, we define two kinds of similarity score, inheritance score and context similarity score to connect the learning path of relative topics. We also construct curriculum ontology with Resource Description Framework (RDF) to make the dynamic learning path accessible. Using the curriculum ontology, we develop a learning system for school which shows a dynamic learning path with broadcasted video clips. 
+ + + + + learning path + + + knowledge graph + Constructing Curriculum Ontology and Dynamic Learning Path Based on Resource Description Framework + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T10:30:00 + 2016-10-21T10:30:00 + Linked Disambiguated Distributional Semantic Networks + 2016-10-21T10:50:00 + Stefano Faralli, Alexander Panchenko, Chris Biemann and Simone Paolo Ponzetto + Linked Disambiguated Distributional Semantic Networks + + + 2016-10-21T10:50:00 + 2016-10-21T10:50:00 + 2016-10-21T10:50:00 + + + + + a97a78e57ea1fe838f9448e92463e2cae16b69b5 + + Sejin Chun + + Sejin Chun + + + + Sejin Chun + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Christian Hennig, Alexander Viehl, Benedikt Kämpgen and Harald Eisenmann + 2016-10-20T16:30:00 + 2016-10-20T16:30:00 + + + Ontology-Based Design of Space Systems + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + + 2016-10-20T16:50:00 + Ontology-Based Design of Space Systems + 2016-10-20T16:50:00 + + + Itziar Aldabe + + + + + + + + Itziar Aldabe + + + + + Itziar Aldabe + + + 2016-10-19T14:40:00 + + + 2016-10-19T14:20:00 + 2016-10-19T14:40:00 + 2016-10-19T14:40:00 + 2016-10-19T14:20:00 + Translating Ontologies in a Real-World Setting with ESSOT + + Mihael Arcan, Mauro Dragoni and Paul Buitelaar + 2016-10-19T14:40:00 + Translating Ontologies in a Real-World Setting with ESSOT + + + + + Yuan-Fang Li + + f1286b01c0c51c46b55258088fc54824d6b86bf1 + Yuan-Fang Li + + + + + + + Yuan-Fang Li + + + + + + + + + + + + + + + + + Serving Ireland's Geospatial Information as Linked Data + + + + + + Serving Ireland's Geospatial Information as Linked Data + + + + + Serving Ireland's Geospatial Information as Linked Data + + + In this paper we present data.geohive.ie, which aims to serve Ireland’s national geospatial data as authoritative Linked Data. 
Currently, the platform provides information on Irish administrative boundaries and the platform was designed to support two use cases: serving boundary data of geographic features at various level of detail and capturing the evolution of administrative boundaries. We report on the decisions taken for modeling and serving the information such as the adoption of an appropriate URI strategy, the devel-opment of necessary ontologies, and the use of (named) graphs to support the aforementioned use cases. + Ontology Engineering + + + Linked Data + In this paper we present data.geohive.ie, which aims to serve Ireland’s national geospatial data as authoritative Linked Data. Currently, the platform provides information on Irish administrative boundaries and the platform was designed to support two use cases: serving boundary data of geographic features at various level of detail and capturing the evolution of administrative boundaries. We report on the decisions taken for modeling and serving the information such as the adoption of an appropriate URI strategy, the devel-opment of necessary ontologies, and the use of (named) graphs to support the aforementioned use cases. + + + Ontology Engineering + + + Geospatial Data + Linked Data + + Geospatial Data + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Conference Linked Data: the ScholarlyData project + semantic web dog food + The Semantic Web Dog Food (SWDF) is the reference linked dataset of the Semantic Web community about papers, people, organisations, and events related to its academic conferences. In this paper we analyse the existing problems of generating, representing and maintaining Linked Data for the SWDF. 
With this work (i) we provide a refactored and cleaned SWDF dataset; (ii) we use a novel data model which improves the Semantic Web Conference Ontology, adopting best ontology design practices and (iii) we provide an open source maintenance workflow to support a healthy grow of the dataset beyond the Semantic Web conferences. + + + + linked data + + + + + + + + semantic web dog food + The Semantic Web Dog Food (SWDF) is the reference linked dataset of the Semantic Web community about papers, people, organisations, and events related to its academic conferences. In this paper we analyse the existing problems of generating, representing and maintaining Linked Data for the SWDF. With this work (i) we provide a refactored and cleaned SWDF dataset; (ii) we use a novel data model which improves the Semantic Web Conference Ontology, adopting best ontology design practices and (iii) we provide an open source maintenance workflow to support a healthy grow of the dataset beyond the Semantic Web conferences. 
+ Conference Linked Data: the ScholarlyData project + + ontology design pattern + linked data + + ontology design pattern + + Conference Linked Data: the ScholarlyData project + + + + + + + + + + MIT + + MIT + + + MIT + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T18:00:00 + Octavian Rinciog and Vlad Posea + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + GovLOD: Towards a Linked Open Data Portal + + 2016-10-19T21:00:00 + + + GovLOD: Towards a Linked Open Data Portal + 2016-10-19T21:00:00 + + + + + + + + + + Jürgen Jakobitsch + 1592785a24a90d15257639d6016fb93706dfbb2d + Jürgen Jakobitsch + Jürgen Jakobitsch + + + + + + + + + + + + + + + + + + + + + + + WizeNoze + WizeNoze + + + WizeNoze + + + + + + + + + + + + + + + + + + + + + Hanoi University of Science and Technology + + + Hanoi University of Science and Technology + + + Hanoi University of Science and Technology + + + + + + + + + + + + Sebastian Neumaier + + + Sebastian Neumaier + da0be3bba39587dd2d9f5c2793e1307fe9ddb14a + Sebastian Neumaier + + + + + + Ivan Ermilov + ad7372712623405f14b0a640808cc490603e2163 + Ivan Ermilov + + + Ivan Ermilov + + + + + + + + + + + Philippe Cudré-Mauroux + + + + + b2a53294e6396a09d8b00cd04d5b90133946b642 + + + + + Philippe Cudre-Mauroux + + + Philippe Cudre-Mauroux + Philippe Cudré-Mauroux + + + Philippe Cudré-Mauroux + + + Philippe Cudre-Mauroux + + + + + + + + + + + + + + + + + + 2016-10-20T11:30:00 + + LinkGen: Multipurpose Linked Data Generator + + LinkGen: Multipurpose Linked Data Generator + 2016-10-20T11:30:00 + 2016-10-20T11:30:00 + 2016-10-20T11:10:00 + 2016-10-20T11:10:00 + Amit Joshi, Pascal Hitzler and Guozhu Dong + + 2016-10-20T11:30:00 + + + + Professor at the University of Oslo + + + Professor at the University of Oslo + Professor at the University of Oslo + + + + + + + + + + + + + + + + + + + Rakuten, Inc. + + + + Rakuten, Inc. + + + + + Rakuten, Inc. 
+ + + + + + + + + + + + + + + + + 4e67340de2275d16373425555cb401a3006329ef + + Xiangnan Ren + + + + + + + Xiangnan Ren + + + Xiangnan Ren + + + + + + Space Systems + + + Systems Engineering + + + Reasoning + Ontology-Based Design of Space Systems + ECSS-E-TM-10-23 + Systems Engineering + + + MBSE + + + + + Conceptual Data Model + + In model-based systems engineering a model specifying the system's design is shared across a variety of disciplines and used to ensure the consistency and quality of the overall design. Existing implementations for describing these system models exhibit a number of shortcomings regarding their approach to data management. In this emerging applications paper, we present the application of an ontology for space system design providing increased semantic soundness of the underlying standardized data specification, enabling reasoners to identify problems in the system, and allowing the application of operational knowledge collected over past projects to the system to be designed. Based on a qualitative evaluation driven by data derived from an actual satellite design project, a reflection on the applicability of ontologies in the overall model-based systems engineering approach is pursued. + In model-based systems engineering a model specifying the system's design is shared across a variety of disciplines and used to ensure the consistency and quality of the overall design. Existing implementations for describing these system models exhibit a number of shortcomings regarding their approach to data management. In this emerging applications paper, we present the application of an ontology for space system design providing increased semantic soundness of the underlying standardized data specification, enabling reasoners to identify problems in the system, and allowing the application of operational knowledge collected over past projects to the system to be designed. 
Based on a qualitative evaluation driven by data derived from an actual satellite design project, a reflection on the applicability of ontologies in the overall model-based systems engineering approach is pursued. + Conceptual Data Model + Space Systems + + ECSS-E-TM-10-23 + + MBSE + + OWL + OWL + Reasoning + + Ontology-Based Design of Space Systems + + + Ontology-Based Design of Space Systems + + + + + Gulnar Mehdi + ecf592192952adf92e2607df0010d0848414bb61 + Gulnar Mehdi + + + + + Gulnar Mehdi + + + + + + + + + + Andreas Dengel + + + + + Andreas Dengel + d462fcc476df738f8b1552398c0edf6008b9c626 + + + + Andreas Dengel + + + + + + + + + + + + + + + + + + + + + Erfan Younesi + + + cb54f05d376eac6899d7d9b6efcf31c7718eb866 + + + + Erfan Younesi + Erfan Younesi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kazuya Ohshima + + + + + + d182d74107c24c54c79380a2cd7eddbe1b1d0b19 + Kazuya Ohshima + + Kazuya Ohshima + + + + + + + + + + Clinga: Bringing Chinese Physical and Human Geography in Linked Open Data + geography ontology + + + Chinese linked geographical dataset + Chinese linked geographical dataset + + Clinga: Bringing Chinese Physical and Human Geography in Linked Open Data + + While the geographical domain has long been involved as an important part of the Linked Data, the small amount of Chinese linked geographical data hinders the integration and sharing of both Chinese and cross-lingual knowledge. In this paper, we contribute to the development of a new Chinese linked geographical dataset named Clinga, by obtaining data from the largest Chinese wiki encyclopedia. We manually design a new geography ontology to categorize a wide range of physical and human geographical entities, and carry out an automatic discovery of links to existing knowledge bases. The resulted Clinga dataset contains over half million Chinese geographical entities and is open access. 
+ + + + + + Linked Data + + + + Clinga + + geography ontology + + + + While the geographical domain has long been involved as an important part of the Linked Data, the small amount of Chinese linked geographical data hinders the integration and sharing of both Chinese and cross-lingual knowledge. In this paper, we contribute to the development of a new Chinese linked geographical dataset named Clinga, by obtaining data from the largest Chinese wiki encyclopedia. We manually design a new geography ontology to categorize a wide range of physical and human geographical entities, and carry out an automatic discovery of links to existing knowledge bases. The resulted Clinga dataset contains over half million Chinese geographical entities and is open access. + + + + Linked Data + Clinga + Clinga: Bringing Chinese Physical and Human Geography in Linked Open Data + + + + + + + + + + + + + + + Graduate School of Yonsei University + + + + + + + + + + Graduate School of Yonsei University + Graduate School of Yonsei University + + + + + + Software Engineering + + Linked Data + Linked Data + Data Engineering + Semantic Web + Data Engineering + + Ontologies + + + Semantic Web + Enabling combined software and data engineering: the ALIGNED suite of ontologies + + Enabling combined software and data engineering: the ALIGNED suite of ontologies + Effective, collaborative integration of software and big data + engineering for Web-scale systems, is now a crucial technical and + economic challenge. This requires new combined data and software + engineering processes and tools. Semantic metadata standards and + linked data principles, provide a technical grounding for such + integrated systems given an appropriate model of the domain. In this + paper we introduce the ALIGNED suite of ontologies specifically + designed to model the information exchange needs of combined + software and data engineering. 
The models have been deployed to + enable: tool-chain integration, such as the exchange of data quality + reports; cross-domain communication, such as interlinked data and + software unit testing; mediation of the system design process + through the capture of design intents and as a source of context for + model-driven software engineering processes. These ontologies are + deployed in web-scale, data-intensive, system development + environments in both the commercial and academic domains. We + exemplify the usage of the suite on a complex collaborative software + and data engineering scenario from the legal information system + domain. + + + + Ontologies + Enabling combined software and data engineering: the ALIGNED suite of ontologies + + + Effective, collaborative integration of software and big data + engineering for Web-scale systems, is now a crucial technical and + economic challenge. This requires new combined data and software + engineering processes and tools. Semantic metadata standards and + linked data principles, provide a technical grounding for such + integrated systems given an appropriate model of the domain. In this + paper we introduce the ALIGNED suite of ontologies specifically + designed to model the information exchange needs of combined + software and data engineering. The models have been deployed to + enable: tool-chain integration, such as the exchange of data quality + reports; cross-domain communication, such as interlinked data and + software unit testing; mediation of the system design process + through the capture of design intents and as a source of context for + model-driven software engineering processes. These ontologies are + deployed in web-scale, data-intensive, system development + environments in both the commercial and academic domains. We + exemplify the usage of the suite on a complex collaborative software + and data engineering scenario from the legal information system + domain. 
+ Software Engineering + + + + Metadata + + Data Mining + + Ontology Learning + Scholarly Ontologies + Automatic Classification of Springer Nature Proceedings with Smart Topic Miner + + Bibliographic Data + + + Automatic Classification of Springer Nature Proceedings with Smart Topic Miner + The process of classifying scholarly outputs is crucial to ensure timely access to knowledge. However, this process is typically carried out manually by expert editors, leading to high costs and slow throughput. In this paper we present Smart Topic Miner (STM), a novel solution which uses semantic web technologies to classify scholarly publications on the basis of a very large automatically generated ontology of research areas. STM was developed to support the Springer Nature Computer Science editorial team in classifying proceedings in the LNCS family. It analyses in real time a set of publications provided by an editor and produces a structured set of topics and a number of Springer Nature classification tags, which best characterise the given input. In this paper we present the architecture of the system and report on an evaluation study conducted with a team of Springer Nature editors. The results of the evaluation, which showed that STM classifies publications with a high degree of accuracy, are very encouraging and as a result we are currently discussing the required next steps to ensure large scale deployment within the company. + Data Mining + + + The process of classifying scholarly outputs is crucial to ensure timely access to knowledge. However, this process is typically carried out manually by expert editors, leading to high costs and slow throughput. In this paper we present Smart Topic Miner (STM), a novel solution which uses semantic web technologies to classify scholarly publications on the basis of a very large automatically generated ontology of research areas. 
STM was developed to support the Springer Nature Computer Science editorial team in classifying proceedings in the LNCS family. It analyses in real time a set of publications provided by an editor and produces a structured set of topics and a number of Springer Nature classification tags, which best characterise the given input. In this paper we present the architecture of the system and report on an evaluation study conducted with a team of Springer Nature editors. The results of the evaluation, which showed that STM classifies publications with a high degree of accuracy, are very encouraging and as a result we are currently discussing the required next steps to ensure large scale deployment within the company. + Ontology Learning + + + + Scholarly Data + Conference Proceedings + + Metadata + Scholarly Data + + + Conference Proceedings + + Bibliographic Data + + + + + Scholarly Ontologies + Automatic Classification of Springer Nature Proceedings with Smart Topic Miner + + + + + + + + + + + + + + + + Rosa Gil + + Rosa Gil + + + + + Rosa Gil + + + + + + + + Fraunhofer IAIS + + + + + + + Fraunhofer IAIS + + Fraunhofer IAIS + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ziqi Zhang + + + + + Ziqi Zhang + + + + + + 23da01f2d0efbf766d22725f48eaee60e72096c1 + Ziqi Zhang + + + + + Duhai Alshukaili + + Duhai Alshukaili + + + 9f1925d57a346ce43fc0e013e19fce273dadc426 + + + Duhai Alshukaili + + + + + + + + + + + + db8051845a44150cf704274420bdc9c8954355c5 + + + + + + + Daniele Dell'Aglio + Daniele Dell'Aglio + + + Daniele Dell'Aglio + + + + + + + SPARQL + RDF + + + + + In this paper, we present a querying language for probabilistic RDF databases, where each triple has a probability, called pSRARQL, built on SPARQL, recommended by W3C as a querying language for RDF databases. Firstly, we present the syntax and semantics of pSPARQL. 
Secondly, we define the query problem of pSPARQL corresponding to probabilities of solutions. Finally, we show that the query evaluation of general pSPARQL patterns is PSPACE-complete. + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + In this paper, we present a querying language for probabilistic RDF databases, where each triple has a probability, called pSRARQL, built on SPARQL, recommended by W3C as a querying language for RDF databases. Firstly, we present the syntax and semantics of pSPARQL. Secondly, we define the query problem of pSPARQL corresponding to probabilities of solutions. Finally, we show that the query evaluation of general pSPARQL patterns is PSPACE-complete. + uncertain queries + + + SPARQL + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + Probabilistic RDF + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + + + RDF + + + + uncertain queries + Probabilistic RDF + + + + + + + + diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java index 2cacb0423..48226c461 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java @@ -38,6 +38,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; public class SkolemChaseExecutionFromToFile { + // FIXME: run on the same set if rules skolem (non-terminating, timeout, print + // the numbeer of generated facts), reset reasoner, run restricted chase public static void main(String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java index a60893639..753f51f93 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java @@ -43,12 +43,14 @@ /** * This example shows how an OWL ontology can be transformed into {@link Rule}s - * and {@link Fact}s using vlog4j-owlapi dependency libraries. + * and {@link Fact}s using vlog4j-owlapi dependency library. * * @author Irina Dragoste * */ public class RestrictedChaseOnOwlOntology { + + //FIXME change www.bike.org name public static void main(String[] args) throws OWLOntologyCreationException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java new file mode 100644 index 000000000..d1fd0a5bb --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java @@ -0,0 +1,213 @@ +package org.semanticweb.vlog4j.examples.rdf; + +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Set; + +import org.eclipse.jdt.annotation.NonNull; +import org.openrdf.model.Model; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; +import org.semanticweb.vlog4j.core.model.api.Atom; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.rdf.RDFModelToAtomsConverter; + +/** + * This example shows how different types of RDF resources can be parsed to RDF + * Models, and such Models can be converted to triple {@link Fact}s using + * vlog4j-rdf dependency library. + * + * @author Irina Dragoste + * + */ +public class AddDataFromRDFModel { + + public static void main(String[] args) throws IOException, RDFParseException, RDFHandlerException, + URISyntaxException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + + /* + * Local file containing metadata of publications from ISWC'16 conference, in + * RDF/XML format. 
+ */ + final File rdfXMLResourceFile = new File("src/main/data/rdf/iswc-2016-complete-alignments.rdf"); + final FileInputStream inputStreamISWC2016 = new FileInputStream(rdfXMLResourceFile); + /* An RDF Model is obtained from parsing the RDF/XML resource. */ + final Model rdfModelISWC2016 = parseRDFResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), + RDFFormat.RDFXML); + + /* + * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * the ternary predicate "TRIPLE". + */ + final Set tripleFactsISWC2016 = RDFModelToAtomsConverter.rdfModelToAtoms(rdfModelISWC2016); + System.out.println("Example triple fact from iswc-2016"); + System.out.println(" - " + tripleFactsISWC2016.iterator().next()); + + /* + * URL of online resource containing metadata of publications from ISWC'17 + * conference, in TURTLE format. + */ + final URL turtleResourceURL = new URL( + "http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2017-complete-alignments.ttl"); + final InputStream inputStreamISWC2017 = turtleResourceURL.openStream(); + /* An RDF Model is obtained from parsing the TURTLE resource. */ + final Model rdfModelISWC2017 = parseRDFResource(inputStreamISWC2017, turtleResourceURL.toURI(), + RDFFormat.TURTLE); + + /* + * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * the ternary predicate "TRIPLE". + */ + final Set tripleFactsISWC2017 = RDFModelToAtomsConverter.rdfModelToAtoms(rdfModelISWC2017); + System.out.println("Example triple fact from iswc-2017"); + System.out.println(" - " + tripleFactsISWC2017.iterator().next()); + + /** + * We wish to combine triples about a person's affiliation, an affiliation's + * organization and an organization's name, to find a person's organization + * name. + */ + + /* Predicate names of the triples found in both RDF files. 
*/ + final Constant constHasAffiiation = Expressions + .makeConstant("https://w3id.org/scholarlydata/ontology/conference-ontology.owl#hasAffiliation"); + final Constant constWithOrganization = Expressions + .makeConstant("https://w3id.org/scholarlydata/ontology/conference-ontology.owl#withOrganisation"); + final Constant constName = Expressions + .makeConstant("https://w3id.org/scholarlydata/ontology/conference-ontology.owl#name"); + + final Variable varOganization = Expressions.makeVariable("organization"); + final Variable varOganizationName = Expressions.makeVariable("organizationName"); + final Variable varPerson = Expressions.makeVariable("person"); + final Variable varAfiliation = Expressions.makeVariable("affiliation"); + + /* Patterns for facts extracted from RDF triples. */ + final Atom personHasAffiliation = Expressions.makeAtom(RDFModelToAtomsConverter.RDF_TRIPLE_PREDICATE, varPerson, + constHasAffiiation, varAfiliation); + final Atom affiliationWithOrganization = Expressions.makeAtom(RDFModelToAtomsConverter.RDF_TRIPLE_PREDICATE, + varAfiliation, constWithOrganization, varOganization); + final Atom organizationHasName = Expressions.makeAtom(RDFModelToAtomsConverter.RDF_TRIPLE_PREDICATE, + varOganization, constName, varOganizationName); + + /* + * We create a Rule that retrieves pairs of persons and their organization name, + * from facts extracted from RDF triples. 
+ */ + final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2); + final Atom creatorOrganizationName = Expressions.makeAtom(predicateHasOrganizationName, varPerson, varOganizationName); + + /* + * hasOrganizationName(person, organizationName) :- TRIPLE(person, , affiliation), + * TRIPLE(affiliation, , organization), + * TRIPLE(organization, , organizationName) + */ + final Rule organizationRule = Expressions.makeRule(creatorOrganizationName, personHasAffiliation, + affiliationWithOrganization, organizationHasName); + + try (final Reasoner reasoner = Reasoner.getInstance();) { + /* + * Facts extracted from the RDF resources are added to the Reasoner's knowledge + * base. + */ + reasoner.addFacts(tripleFactsISWC2016); + reasoner.addFacts(tripleFactsISWC2017); + /* + * The rule that maps people to their organization name based on facts extracted + * from RDF triples is added to the Reasoner's knowledge base. + */ + reasoner.addRules(organizationRule); + + reasoner.load(); + reasoner.reason(); + + /* We query for persons whose organization name is "TU Dresden" .*/ + final Constant constantTuDresdenOrganization = Expressions.makeConstant("\"TU Dresden\""); + @NonNull + final Atom queryTUDresdenParticipantsAtISWC = Expressions.makeAtom(predicateHasOrganizationName, varPerson, + constantTuDresdenOrganization); + + System.out.println("Participants at ISWC'16 and '17 from Organization 'TU Dresden':"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC, + false)) { + queryResultIterator.forEachRemaining(answer -> System.out + .println(" - " + answer.getTerms().get(0) + ", organization " + answer.getTerms().get(1))); + } + + } + + } + + /** + * Parses the data from the supplied InputStream, using the supplied baseURI to + * resolve any relative URI references. + * + * @param inputStream + * The content to be parsed, expected to be in the given + * {@code rdfFormat}. 
+ * @param baseURI + * The URI associated with the data in the InputStream. + * @param rdfFormat + * The expected RDFformat of the inputStream resource that is to be + * parsed. + * @return A Model containing the RDF triples. Blanks have unique ids across + * different models. + * @throws IOException + * If an I/O error occurred while data was read from the + * InputStream. + * @throws RDFParseException + * If the parser has found an unrecoverable parse error. + * @throws RDFHandlerException + * If the configured statement handler has encountered an + * unrecoverable error. + */ + private static Model parseRDFResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) + throws IOException, RDFParseException, RDFHandlerException { + final Model model = new LinkedHashModel(); + + final RDFParser rdfParser = Rio.createParser(rdfFormat); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseURI.toString()); + + return model; + } + +} From 5bdecddce394cc037cb3ad4f8197be342b7deaf1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 May 2018 17:08:40 +0200 Subject: [PATCH 0017/1255] improved javadoc --- vlog4j-examples/src/main/data/owl/bike.owl | 168 +++++++++--------- .../owlapi/RestrictedChaseOnOwlOntology.java | 23 ++- .../examples/rdf/AddDataFromRDFModel.java | 21 +-- 3 files changed, 105 insertions(+), 107 deletions(-) diff --git a/vlog4j-examples/src/main/data/owl/bike.owl b/vlog4j-examples/src/main/data/owl/bike.owl index a94284aed..56e753ed6 100644 --- a/vlog4j-examples/src/main/data/owl/bike.owl +++ b/vlog4j-examples/src/main/data/owl/bike.owl @@ -1,84 +1,84 @@ -@prefix : . -@prefix owl: . -@prefix rdf: . -@prefix xml: . -@prefix xsd: . -@prefix rdfs: . -@base . - - rdf:type owl:Ontology . 
- -################################################################# -# Object Properties -################################################################# - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPart -:hasPart rdf:type owl:ObjectProperty ; - owl:inverseOf :isPartOf . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPartSource -:hasPartSource rdf:type owl:ObjectProperty ; - rdfs:subPropertyOf :hasPart . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOf -:isPartOf rdf:type owl:ObjectProperty . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOfSource -:isPartOfSource rdf:type owl:ObjectProperty ; - rdfs:subPropertyOf :isPartOf . - - -################################################################# -# Classes -################################################################# - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Bike -:Bike rdf:type owl:Class ; - rdfs:subClassOf [ rdf:type owl:Restriction ; - owl:onProperty :hasPart ; - owl:someValuesFrom :Wheel - ] . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#BikeSource -:BikeSource rdf:type owl:Class ; - rdfs:subClassOf :Bike . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Wheel -:Wheel rdf:type owl:Class ; - rdfs:subClassOf [ rdf:type owl:Restriction ; - owl:onProperty :isPartOf ; - owl:someValuesFrom :Bike - ] . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#WheelSource -:WheelSource rdf:type owl:Class ; - rdfs:subClassOf :Wheel . 
- - -################################################################# -# Individuals -################################################################# - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b1 -:b1 rdf:type owl:NamedIndividual , - :BikeSource ; - :hasPartSource :w1 . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b2 -:b2 rdf:type owl:NamedIndividual , - :BikeSource . - - -### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#w1 -:w1 rdf:type owl:NamedIndividual , - :WheelSource . - - -### Generated by the OWL API (version 4.2.8.20170104-2310) https://github.com/owlcs/owlapi +@prefix : . +@prefix owl: . +@prefix rdf: . +@prefix xml: . +@prefix xsd: . +@prefix rdfs: . +@base . + + rdf:type owl:Ontology . + +################################################################# +# Object Properties +################################################################# + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPart +:hasPart rdf:type owl:ObjectProperty ; + owl:inverseOf :isPartOf . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPartSource +:hasPartSource rdf:type owl:ObjectProperty ; + rdfs:subPropertyOf :hasPart . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOf +:isPartOf rdf:type owl:ObjectProperty . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOfSource +:isPartOfSource rdf:type owl:ObjectProperty ; + rdfs:subPropertyOf :isPartOf . 
+ + +################################################################# +# Classes +################################################################# + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Bike +:Bike rdf:type owl:Class ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onProperty :hasPart ; + owl:someValuesFrom :Wheel + ] . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#BikeSource +:BikeSource rdf:type owl:Class ; + rdfs:subClassOf :Bike . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Wheel +:Wheel rdf:type owl:Class ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onProperty :isPartOf ; + owl:someValuesFrom :Bike + ] . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#WheelSource +:WheelSource rdf:type owl:Class ; + rdfs:subClassOf :Wheel . + + +################################################################# +# Individuals +################################################################# + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b1 +:b1 rdf:type owl:NamedIndividual , + :BikeSource ; + :hasPartSource :w1 . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b2 +:b2 rdf:type owl:NamedIndividual , + :BikeSource . + + +### http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#w1 +:w1 rdf:type owl:NamedIndividual , + :WheelSource . 
+ + +### Generated by the OWL API (version 4.2.8.20170104-2310) https://github.com/owlcs/owlapi diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java index 753f51f93..bf7a8f12a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java @@ -42,15 +42,15 @@ import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; /** - * This example shows how an OWL ontology can be transformed into {@link Rule}s - * and {@link Fact}s using vlog4j-owlapi dependency library. + * This example shows how vlog4j-owlapi library (class {@link OwlToRulesConverter}) can be used to transform + * an OWL ontology into vlog4j-core {@link Rule}s and {@link Atom}s. * * @author Irina Dragoste * */ public class RestrictedChaseOnOwlOntology { - - //FIXME change www.bike.org name + + // FIXME change www.bike.org name public static void main(String[] args) throws OWLOntologyCreationException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { @@ -61,7 +61,7 @@ public static void main(String[] args) throws OWLOntologyCreationException, Reas /* * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in - * source ontology to target Rule and Fact objects + * source ontology to target Rule and Atom objects */ OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); owlToRulesConverter.addOntology(ontology); @@ -92,19 +92,16 @@ public static void main(String[] args) throws OWLOntologyCreationException, Reas /* Query for the parts of bike constant "b2". 
*/ Variable vx = Expressions.makeVariable("x"); - Constant b2 = Expressions.makeConstant("http://www.bike.org#b2"); - Atom isPartOfPairs = Expressions.makeAtom("http://www.bike.org#isPartOf", vx, b2); + Constant b2 = Expressions.makeConstant("http://www.bike-example.ontology#b2"); + Atom isPartOfPairs = Expressions.makeAtom("http://www.bike-example.ontology#isPartOf", vx, b2); /* * See that an unnamed individual has been introduced to satisfy * owl:someValuesFrom restriction: * - * http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Bike - * :Bike rdf:type owl:Class ; - * rdfs:subClassOf [ rdf:type owl:Restriction ; - * owl:onProperty :hasPart ; - * owl:someValuesFrom :Wheel - * ] . + * http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2# + * Bike :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; + * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . */ try (QueryResultIterator answers = reasoner.answerQuery(isPartOfPairs, true);) { answers.forEachRemaining(answer -> System.out diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java index d1fd0a5bb..3371e9dc1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java @@ -52,10 +52,10 @@ import org.semanticweb.vlog4j.rdf.RDFModelToAtomsConverter; /** - * This example shows how different types of RDF resources can be parsed to RDF - * Models, and such Models can be converted to triple {@link Fact}s using - * vlog4j-rdf dependency library. - * + * This example shows how vlog4j-rdf library's utility class + * {@link RDFModelToAtomsConverter} can be used to convert RDF {@link Model}s + * from various types of RDF resources to vlog4j-core {@code Atom} sets. 
+ * * @author Irina Dragoste * */ @@ -133,12 +133,13 @@ public static void main(String[] args) throws IOException, RDFParseException, RD * from facts extracted from RDF triples. */ final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2); - final Atom creatorOrganizationName = Expressions.makeAtom(predicateHasOrganizationName, varPerson, varOganizationName); - + final Atom creatorOrganizationName = Expressions.makeAtom(predicateHasOrganizationName, varPerson, + varOganizationName); + /* - * hasOrganizationName(person, organizationName) :- TRIPLE(person, , affiliation), - * TRIPLE(affiliation, , organization), - * TRIPLE(organization, , organizationName) + * hasOrganizationName(person, organizationName) :- TRIPLE(person,, affiliation), + * TRIPLE(affiliation, , organization), + * TRIPLE(organization, , organizationName) */ final Rule organizationRule = Expressions.makeRule(creatorOrganizationName, personHasAffiliation, affiliationWithOrganization, organizationHasName); @@ -159,7 +160,7 @@ public static void main(String[] args) throws IOException, RDFParseException, RD reasoner.load(); reasoner.reason(); - /* We query for persons whose organization name is "TU Dresden" .*/ + /* We query for persons whose organization name is "TU Dresden" . 
*/ final Constant constantTuDresdenOrganization = Expressions.makeConstant("\"TU Dresden\""); @NonNull final Atom queryTUDresdenParticipantsAtISWC = Expressions.makeAtom(predicateHasOrganizationName, varPerson, From 12c7821ca602213bcb7ad7a1f1209f5b319b31a8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 8 May 2018 17:34:33 +0200 Subject: [PATCH 0018/1255] created one example class for skolem vs restricted chase --- vlog4j-examples/README.md | 5 +- ...romToFile.java => AddDataFromCSVFile.java} | 2 +- ...> SkolemVsRestrictedChaseTermination.java} | 264 ++++++++++-------- 3 files changed, 144 insertions(+), 127 deletions(-) rename vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/{SkolemChaseExecutionFromToFile.java => AddDataFromCSVFile.java} (99%) rename vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/{RestrictedChaseExecutionInMemory.java => SkolemVsRestrictedChaseTermination.java} (65%) diff --git a/vlog4j-examples/README.md b/vlog4j-examples/README.md index 789743204..602258211 100644 --- a/vlog4j-examples/README.md +++ b/vlog4j-examples/README.md @@ -1,5 +1,6 @@ This project contains examples of different use-cases of **vlog4j** functionality. 
-- reasoning with th default Restricted Chase algorithm : RestrictedChaseExecutionInMemory.java -- reasoning with Skolem Chase algorithm : SkolemChaseExecutionFromToFile.java +- reasoning termination for various algorithms: Skolem and Restricted Chase algorithm : SkolemVsRestrictedChaseTermination.java +- adding facts from a CSV file; exporting query results to CSV: AddDataFromCSVFile.java +- adding facts from the result of a SPARQL query on a remote endpoint: AddDataFromSparqlQueryResults.java - converting an OWL ontology into rules and facts: owlapi.RestrictedChaseOnOwlOntology.java - converting an RDF resource into facts: rdf.AddDataFromRDFModel.java \ No newline at end of file diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromCSVFile.java similarity index 99% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java rename to vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromCSVFile.java index 48226c461..aa96fa1b3 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromCSVFile.java @@ -37,7 +37,7 @@ import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -public class SkolemChaseExecutionFromToFile { +public class AddDataFromCSVFile { // FIXME: run on the same set if rules skolem (non-terminating, timeout, print // the numbeer of generated facts), reset reasoner, run restricted chase diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/RestrictedChaseExecutionInMemory.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java similarity index 65% rename 
from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/RestrictedChaseExecutionInMemory.java rename to vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java index 05ec7fe28..bc52e4b4e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/RestrictedChaseExecutionInMemory.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java @@ -1,124 +1,140 @@ -package org.semanticweb.vlog4j.examples; - -import java.io.IOException; - -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -/*- - * #%L - * examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -public class RestrictedChaseExecutionInMemory { - public static void main(String[] args) - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - - // 1. Instantiating entities, rules and facts - final Predicate bicycleIDB = Expressions.makePredicate("BicycleIDB", 1); - final Predicate bicycleEDB = Expressions.makePredicate("BicycleEDB", 1); - final Predicate wheelIDB = Expressions.makePredicate("WheelIDB", 1); - final Predicate wheelEDB = Expressions.makePredicate("WheelEDB", 1); - final Predicate hasPartIDB = Expressions.makePredicate("HasPartIDB", 2); - final Predicate hasPartEDB = Expressions.makePredicate("HasPartEDB", 2); - final Predicate isPartOfIDB = Expressions.makePredicate("IsPartOfIDB", 2); - final Predicate isPartOfEDB = Expressions.makePredicate("IsPartOfEDB", 2); - final Constant bicycle1 = Expressions.makeConstant("bicycle1"); - final Constant bicycle2 = Expressions.makeConstant("bicycle2"); - final Constant wheel1 = Expressions.makeConstant("wheel1"); - final Variable x = Expressions.makeVariable("x"); - final Variable y = Expressions.makeVariable("y"); - - // BicycleIDB(?x) :- BicycleEDB(?x) . - final Atom bicycleIDBX = Expressions.makeAtom(bicycleIDB, x); - final Atom bicycleEDBX = Expressions.makeAtom(bicycleEDB, x); - final Rule rule1 = Expressions.makeRule(bicycleIDBX, bicycleEDBX); - - // WheelIDB(?x) :- WheelEDB(?x) . - final Atom wheelIDBX = Expressions.makeAtom(wheelIDB, x); - final Atom wheelEDBX = Expressions.makeAtom(wheelEDB, x); - final Rule rule2 = Expressions.makeRule(wheelIDBX, wheelEDBX); - - // hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . - final Atom hasPartIDBXY = Expressions.makeAtom(hasPartIDB, x, y); - final Atom hasPartEDBXY = Expressions.makeAtom(hasPartEDB, x, y); - final Rule rule3 = Expressions.makeRule(hasPartIDBXY, hasPartEDBXY); - - // isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . 
- final Atom isPartOfIDBXY = Expressions.makeAtom(isPartOfIDB, x, y); - final Atom isPartOfEDBXY = Expressions.makeAtom(isPartOfEDB, x, y); - final Rule rule4 = Expressions.makeRule(isPartOfIDBXY, isPartOfEDBXY); - - // HasPartIDB(?x, !y), WheelIDB(!y) :- BicycleIDB(?x) . - final Atom wheelIDBY = Expressions.makeAtom(wheelIDB, y); - final Rule rule5 = Expressions.makeRule(Expressions.makeConjunction(hasPartIDBXY, wheelIDBY), - Expressions.makeConjunction(bicycleIDBX)); - - // IsPartOfIDB(?x, !y), BicycleIDB(!y) :- WheelIDB(?x) . - final Atom bycicleIDBY = Expressions.makeAtom(bicycleIDB, y); - final Rule rule6 = Expressions.makeRule(Expressions.makeConjunction(isPartOfIDBXY, bycicleIDBY), - Expressions.makeConjunction(wheelIDBX)); - - // IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . - final Atom hasPartIDBYX = Expressions.makeAtom(hasPartIDB, y, x); - final Rule rule7 = Expressions.makeRule(isPartOfIDBXY, hasPartIDBYX); - - // HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . - final Atom isPartOfIDBYX = Expressions.makeAtom(isPartOfIDB, y, x); - final Rule rule8 = Expressions.makeRule(hasPartIDBXY, isPartOfIDBYX); - - // BicycleEDB(bicycle1) . - final Atom fact1 = Expressions.makeAtom(bicycleEDB, bicycle1); - - // HasPartEDB(bicycle1, wheel1) . - final Atom fact2 = Expressions.makeAtom(hasPartEDB, bicycle1, wheel1); - - // Wheel(wheel1) . - final Atom fact3 = Expressions.makeAtom(wheelEDB, wheel1); - - // BicycleEDB(b) . - final Atom fact4 = Expressions.makeAtom(bicycleEDB, bicycle2); - - // 2. Loading, reasoning, and querying. - // Use try-with resources, or remember to call close() to free the reasoner - // resources. 
- try (Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - - reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); - reasoner.addFacts(fact1, fact2, fact3, fact4); - reasoner.load(); - - ExamplesUtil.printOutQueryAnswers(hasPartEDBXY, reasoner); - - reasoner.reason(); - - ExamplesUtil.printOutQueryAnswers(hasPartIDBXY, reasoner); - } - } - -} +package org.semanticweb.vlog4j.examples; + +import java.io.IOException; + +import org.semanticweb.vlog4j.core.model.api.Atom; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; + +/** + * This example shows non-termination of the Skolem Chase, versus termination of + * the Restricted Chase on the same set of rules and facts. Note that the + * Restricted Chase is the default reasoning algorithm, as it terminates in most + * cases and generates a smaller number of facts. + * + * @author Irina Dragoste + * + */ +public class SkolemVsRestrictedChaseTermination { + + public static void main(String[] args) + throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + // 1. 
Instantiating entities, rules and facts + final Predicate bicycleIDB = Expressions.makePredicate("BicycleIDB", 1); + final Predicate bicycleEDB = Expressions.makePredicate("BicycleEDB", 1); + final Predicate wheelIDB = Expressions.makePredicate("WheelIDB", 1); + final Predicate wheelEDB = Expressions.makePredicate("WheelEDB", 1); + final Predicate hasPartIDB = Expressions.makePredicate("HasPartIDB", 2); + final Predicate hasPartEDB = Expressions.makePredicate("HasPartEDB", 2); + final Predicate isPartOfIDB = Expressions.makePredicate("IsPartOfIDB", 2); + final Predicate isPartOfEDB = Expressions.makePredicate("IsPartOfEDB", 2); + final Constant bicycle1 = Expressions.makeConstant("bicycle1"); + final Constant bicycle2 = Expressions.makeConstant("bicycle2"); + final Constant wheel1 = Expressions.makeConstant("wheel1"); + final Variable x = Expressions.makeVariable("x"); + final Variable y = Expressions.makeVariable("y"); + + // BicycleIDB(?x) :- BicycleEDB(?x) . + final Atom bicycleIDBX = Expressions.makeAtom(bicycleIDB, x); + final Atom bicycleEDBX = Expressions.makeAtom(bicycleEDB, x); + final Rule rule1 = Expressions.makeRule(bicycleIDBX, bicycleEDBX); + + // WheelIDB(?x) :- WheelEDB(?x) . + final Atom wheelIDBX = Expressions.makeAtom(wheelIDB, x); + final Atom wheelEDBX = Expressions.makeAtom(wheelEDB, x); + final Rule rule2 = Expressions.makeRule(wheelIDBX, wheelEDBX); + + // hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . + final Atom hasPartIDBXY = Expressions.makeAtom(hasPartIDB, x, y); + final Atom hasPartEDBXY = Expressions.makeAtom(hasPartEDB, x, y); + final Rule rule3 = Expressions.makeRule(hasPartIDBXY, hasPartEDBXY); + + // isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . + final Atom isPartOfIDBXY = Expressions.makeAtom(isPartOfIDB, x, y); + final Atom isPartOfEDBXY = Expressions.makeAtom(isPartOfEDB, x, y); + final Rule rule4 = Expressions.makeRule(isPartOfIDBXY, isPartOfEDBXY); + + // HasPartIDB(?x, !y), WheelIDB(!y) :- BicycleIDB(?x) . 
+ final Atom wheelIDBY = Expressions.makeAtom(wheelIDB, y); + final Rule rule5 = Expressions.makeRule(Expressions.makeConjunction(hasPartIDBXY, wheelIDBY), + Expressions.makeConjunction(bicycleIDBX)); + + // IsPartOfIDB(?x, !y), BicycleIDB(!y) :- WheelIDB(?x) . + final Atom bycicleIDBY = Expressions.makeAtom(bicycleIDB, y); + final Rule rule6 = Expressions.makeRule(Expressions.makeConjunction(isPartOfIDBXY, bycicleIDBY), + Expressions.makeConjunction(wheelIDBX)); + + // IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . + final Atom hasPartIDBYX = Expressions.makeAtom(hasPartIDB, y, x); + final Rule rule7 = Expressions.makeRule(isPartOfIDBXY, hasPartIDBYX); + + // HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . + final Atom isPartOfIDBYX = Expressions.makeAtom(isPartOfIDB, y, x); + final Rule rule8 = Expressions.makeRule(hasPartIDBXY, isPartOfIDBYX); + + // BicycleEDB(bicycle1) . + final Atom fact1 = Expressions.makeAtom(bicycleEDB, bicycle1); + + // HasPartEDB(bicycle1, wheel1) . + final Atom fact2 = Expressions.makeAtom(hasPartEDB, bicycle1, wheel1); + + // Wheel(wheel1) . + final Atom fact3 = Expressions.makeAtom(wheelEDB, wheel1); + + // BicycleEDB(b) . + final Atom fact4 = Expressions.makeAtom(bicycleEDB, bicycle2); + + // 2. Loading, reasoning, and querying. + // Use try-with resources, or remember to call close() to free the reasoner + // resources. + try (Reasoner reasoner = Reasoner.getInstance()) { + + reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); + reasoner.addFacts(fact1, fact2, fact3, fact4); + reasoner.load(); + + System.out.println("Answers to query " + hasPartIDBXY + " before reasoning:"); + printOutQueryAnswers(hasPartIDBXY, reasoner); + + reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); + reasoner.setReasoningTimeout(1); + System.out.println("Starting Skolem Chase with 1 second timeout."); + boolean skolemChaseFinished = reasoner.reason(); + System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? 
" + skolemChaseFinished); + System.out.println( + "Answers to query " + hasPartIDBXY + " after reasoning with the Skolem Chase for 1 second:"); + printOutQueryAnswers(hasPartIDBXY, reasoner); + + System.out.println(); + System.out.println("Reseting reasoner; discarding facts generated during reasoning."); + reasoner.resetReasoner(); + reasoner.load(); + + System.out.println("Answers to query " + hasPartIDBXY + " before reasoning:"); + printOutQueryAnswers(hasPartIDBXY, reasoner); + + reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + reasoner.setReasoningTimeout(null); + System.out.println("Starting Restricted Chase with no timeout."); + boolean restrictedChaseFinished = reasoner.reason(); + System.out.println("Has Restricted Chase algorithm finished? " + restrictedChaseFinished); + System.out.println("Answers to query " + hasPartIDBXY + " after reasoning with the Restricted Chase:"); + printOutQueryAnswers(hasPartIDBXY, reasoner); + } + } + + private static void printOutQueryAnswers(Atom queryAtom, Reasoner reasoner) throws ReasonerStateException { + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true)) { + queryResultIterator.forEachRemaining(queryResult -> System.out.println(" - " + queryResult)); + } + } +} From 16adf10b7c02ef9b6ad7cc06522fa38c5cb0940e Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 8 May 2018 17:37:21 +0200 Subject: [PATCH 0019/1255] add License header --- .../SkolemVsRestrictedChaseTermination.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java index bc52e4b4e..f330e452e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java +++ 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.IOException; import org.semanticweb.vlog4j.core.model.api.Atom; From ef0cc921292b1b4750c10147ca405f8de782b7f9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 8 May 2018 17:40:26 +0200 Subject: [PATCH 0020/1255] Update README.md --- vlog4j-examples/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/vlog4j-examples/README.md b/vlog4j-examples/README.md index 602258211..9bc5115cb 100644 --- a/vlog4j-examples/README.md +++ b/vlog4j-examples/README.md @@ -1,6 +1,6 @@ This project contains examples of different use-cases of **vlog4j** functionality. 
-- reasoning termination for various algorithms: Skolem and Restricted Chase algorithm : SkolemVsRestrictedChaseTermination.java -- adding facts from a CSV file; exporting query results to CSV: AddDataFromCSVFile.java -- adding facts from the result of a SPARQL query on a remote endpoint: AddDataFromSparqlQueryResults.java -- converting an OWL ontology into rules and facts: owlapi.RestrictedChaseOnOwlOntology.java -- converting an RDF resource into facts: rdf.AddDataFromRDFModel.java \ No newline at end of file +- reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : SkolemVsRestrictedChaseTermination.java +- adding facts from a **CSV file**; exporting query results to CSV: AddDataFromCSVFile.java +- adding facts from the result of a **SPARQL query** on a remote endpoint: AddDataFromSparqlQueryResults.java +- converting an **OWL ontology** into rules and facts: owlapi.RestrictedChaseOnOwlOntology.java +- converting an **RDF resource** into facts: rdf.AddDataFromRDFModel.java From 5cc50a0d6e3a54faf1cc0e4b06917a03bd30d2bf Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 8 May 2018 17:44:58 +0200 Subject: [PATCH 0021/1255] Update Readme.md for examples --- vlog4j-examples/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/vlog4j-examples/README.md b/vlog4j-examples/README.md index 9bc5115cb..c80f17860 100644 --- a/vlog4j-examples/README.md +++ b/vlog4j-examples/README.md @@ -1,6 +1,6 @@ This project contains examples of different use-cases of **vlog4j** functionality. 
-- reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : SkolemVsRestrictedChaseTermination.java -- adding facts from a **CSV file**; exporting query results to CSV: AddDataFromCSVFile.java -- adding facts from the result of a **SPARQL query** on a remote endpoint: AddDataFromSparqlQueryResults.java -- converting an **OWL ontology** into rules and facts: owlapi.RestrictedChaseOnOwlOntology.java -- converting an **RDF resource** into facts: rdf.AddDataFromRDFModel.java +- reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : *SkolemVsRestrictedChaseTermination.java* +- adding facts from a **CSV file**; exporting query results to CSV: *AddDataFromCSVFile.java* +- adding facts from the result of a **SPARQL query** on a remote endpoint: *AddDataFromSparqlQueryResults.java* +- converting an **OWL ontology** into rules and facts: *owlapi.RestrictedChaseOnOwlOntology.java* +- converting an **RDF resource** into facts: *rdf.AddDataFromRDFModel.java* From b821f76884769a9e9f9808378a5c7da5e22ca17d Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 9 May 2018 10:52:27 +0200 Subject: [PATCH 0022/1255] rename **RestrictedChaseOnOwlOntology** to **OwlOntologyToRulesAndFacts** for a clearer suggestion of how to transform the OWL ontology to rules and facts. 
--- vlog4j-examples/README.md | 2 +- ...y.java => OwlOntologyToRulesAndFacts.java} | 58 ++++++++++++++----- 2 files changed, 46 insertions(+), 14 deletions(-) rename vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/{RestrictedChaseOnOwlOntology.java => OwlOntologyToRulesAndFacts.java} (63%) diff --git a/vlog4j-examples/README.md b/vlog4j-examples/README.md index c80f17860..b76702fd9 100644 --- a/vlog4j-examples/README.md +++ b/vlog4j-examples/README.md @@ -2,5 +2,5 @@ This project contains examples of different use-cases of **vlog4j** functionalit - reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : *SkolemVsRestrictedChaseTermination.java* - adding facts from a **CSV file**; exporting query results to CSV: *AddDataFromCSVFile.java* - adding facts from the result of a **SPARQL query** on a remote endpoint: *AddDataFromSparqlQueryResults.java* -- converting an **OWL ontology** into rules and facts: *owlapi.RestrictedChaseOnOwlOntology.java* +- converting an **OWL ontology** into rules and facts; reasoning on an **OWL ontology** : *owlapi.OwlOntologyToRulesAndFacts.java* - converting an **RDF resource** into facts: *rdf.AddDataFromRDFModel.java* diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java similarity index 63% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java rename to vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index bf7a8f12a..c77bc0a5e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/RestrictedChaseOnOwlOntology.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -32,6 +32,7 @@ import 
org.semanticweb.vlog4j.core.model.api.Atom; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -42,15 +43,14 @@ import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; /** - * This example shows how vlog4j-owlapi library (class {@link OwlToRulesConverter}) can be used to transform - * an OWL ontology into vlog4j-core {@link Rule}s and {@link Atom}s. + * This example shows how vlog4j-owlapi library (class + * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into + * vlog4j-core {@link Rule}s and {@link Atom}s. * * @author Irina Dragoste * */ -public class RestrictedChaseOnOwlOntology { - - // FIXME change www.bike.org name +public class OwlOntologyToRulesAndFacts { public static void main(String[] args) throws OWLOntologyCreationException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { @@ -87,26 +87,58 @@ public static void main(String[] args) throws OWLOntologyCreationException, Reas reasoner.addRules(new ArrayList(owlToRulesConverter.getRules())); reasoner.addFacts(owlToRulesConverter.getFacts()); reasoner.load(); - /* Reason over loaded ontology */ + + /* Reason over loaded ontology with the default algorithm Restricted Chase*/ + System.out.println("Reasoning default algorithm: "+ reasoner.getAlgorithm()); reasoner.reason(); /* Query for the parts of bike constant "b2". 
*/ Variable vx = Expressions.makeVariable("x"); Constant b2 = Expressions.makeConstant("http://www.bike-example.ontology#b2"); - Atom isPartOfPairs = Expressions.makeAtom("http://www.bike-example.ontology#isPartOf", vx, b2); + + Atom b2HasPart = Expressions.makeAtom("http://www.bike-example.ontology#hasPart", b2, vx); + System.out.println("Answers to query " + b2HasPart + " :"); /* * See that an unnamed individual has been introduced to satisfy * owl:someValuesFrom restriction: * - * http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2# - * Bike :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; - * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . + * :Bike rdf:type owl:Class ; + * rdfs:subClassOf [ rdf:type owl:Restriction ; + * owl:onProperty :hasPart ; + * owl:someValuesFrom :Wheel + * ] . */ - try (QueryResultIterator answers = reasoner.answerQuery(isPartOfPairs, true);) { - answers.forEachRemaining(answer -> System.out - .println(answer.getTerms().get(0) + " isPartOf " + answer.getTerms().get(1))); + try (QueryResultIterator answers = reasoner.answerQuery(b2HasPart, true);) { + answers.forEachRemaining(answer -> { + final Term constantB2 = answer.getTerms().get(0); + final Term term = answer.getTerms().get(1); + System.out.println(" - " + constantB2 + " hasPart " + term); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); } + + Atom isPartOfB2 = Expressions.makeAtom("http://www.bike-example.ontology#isPartOf", vx, b2); + + System.out.println("Answers to query " + isPartOfB2 + " :"); + /* + * See that the same unnamed individual is part of Bike b2, satisfying restriction + * :Wheel rdf:type owl:Class ; + * rdfs:subClassOf [ rdf:type owl:Restriction ; + * owl:onProperty :isPartOf ; + * owl:someValuesFrom :Bike + * ] . 
+ */ + try (QueryResultIterator answers = reasoner.answerQuery(isPartOfB2, true);) { + answers.forEachRemaining(answer -> { + Term term = answer.getTerms().get(0); + Term constantB2 = answer.getTerms().get(1); + System.out + .println(" - " + term + " isPartOf " + constantB2); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); + } + } } } From b96f6fabf3fe6b1d9799f581717b10fd68149e78 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 9 May 2018 11:52:09 +0200 Subject: [PATCH 0023/1255] improve comments on examples --- .../vlog4j/examples/AddDataFromCSVFile.java | 3 - .../AddDataFromSparqlQueryResults.java | 10 +-- .../SkolemVsRestrictedChaseTermination.java | 75 ++++++++++++++----- .../examples/rdf/AddDataFromRDFModel.java | 8 +- 4 files changed, 68 insertions(+), 28 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromCSVFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromCSVFile.java index aa96fa1b3..84a927b69 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromCSVFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromCSVFile.java @@ -38,8 +38,6 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; public class AddDataFromCSVFile { - // FIXME: run on the same set if rules skolem (non-terminating, timeout, print - // the numbeer of generated facts), reset reasoner, run restricted chase public static void main(String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { @@ -82,7 +80,6 @@ public static void main(String[] args) Expressions.makeConjunction(bicycleIDBX)); // IsPartOfIDB(?x, !y) :- WheelIDB(?x) . 
- // Atom bycicleIDBY = Expressions.makeAtom(bicycleIDB, y); final Rule rule6 = Expressions.makeRule(Expressions.makeConjunction(isPartOfIDBXY), Expressions.makeConjunction(wheelIDBX)); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java index ea1072f49..2a6e9ab75 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/AddDataFromSparqlQueryResults.java @@ -43,7 +43,7 @@ /** * This is a simple example of adding data from the result of a SPARQL query on - * a remote database endpoint. In this example, we will query WikiData for + * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In this example, we will query WikiData for * titles of publications that have authors who have children together. * * @author Irina Dragoste @@ -117,7 +117,7 @@ public static void main(String[] args) * variables (title, mother and father). */ final Predicate titleOfPublicationThatHasAuthorsWhoParentTheSameChild = Expressions - .makePredicate("havePublicationsTogether", 3); + .makePredicate("publicationAndAuthorsWhoParentTheSameChild", 3); try (Reasoner reasoner = Reasoner.getInstance()) { @@ -139,7 +139,7 @@ public static void main(String[] args) Expressions.makeVariable("x"), Expressions.makeVariable("y"), Expressions.makeVariable("z")); /* We query the reasoner for facts of the SPARQL query result predicate. 
*/ - System.out.println("Publications that have authors who parent the same child:"); + System.out.println("Titles of publications that have authors who parent the same child:"); try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, false)) { queryResultIterator.forEachRemaining(queryResult -> { List queryResultTerms = queryResult.getTerms(); @@ -155,8 +155,8 @@ public static void main(String[] args) Atom isFather = Expressions.makeAtom("isFather", Expressions.makeVariable("z")); Conjunction ruleHeadConjunction = Expressions.makeConjunction(haveChildrenTogether, isMother, isFather); /* - * haveChildrenTogetherRuleHeadAtom(y,z), isMother(y), isFather(z) :- - * titleOfPublicationThatHasAuthorsWhoParentTheSameChild(x,y,z) + * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :- + * publicationAndAuthorsWhoParentTheSameChild(?x, ?y, ?z) */ Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(queryAtom)); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java index f330e452e..99c32db6e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemVsRestrictedChaseTermination.java @@ -48,7 +48,7 @@ public class SkolemVsRestrictedChaseTermination { public static void main(String[] args) throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { - // 1. Instantiating entities, rules and facts + /* 1. 
Instantiating entities, rules and facts */ final Predicate bicycleIDB = Expressions.makePredicate("BicycleIDB", 1); final Predicate bicycleEDB = Expressions.makePredicate("BicycleEDB", 1); final Predicate wheelIDB = Expressions.makePredicate("WheelIDB", 1); @@ -63,90 +63,131 @@ public static void main(String[] args) final Variable x = Expressions.makeVariable("x"); final Variable y = Expressions.makeVariable("y"); - // BicycleIDB(?x) :- BicycleEDB(?x) . + /* BicycleIDB(?x) :- BicycleEDB(?x) . */ final Atom bicycleIDBX = Expressions.makeAtom(bicycleIDB, x); final Atom bicycleEDBX = Expressions.makeAtom(bicycleEDB, x); final Rule rule1 = Expressions.makeRule(bicycleIDBX, bicycleEDBX); - // WheelIDB(?x) :- WheelEDB(?x) . + /* WheelIDB(?x) :- WheelEDB(?x) . */ final Atom wheelIDBX = Expressions.makeAtom(wheelIDB, x); final Atom wheelEDBX = Expressions.makeAtom(wheelEDB, x); final Rule rule2 = Expressions.makeRule(wheelIDBX, wheelEDBX); - // hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . + /* hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . */ final Atom hasPartIDBXY = Expressions.makeAtom(hasPartIDB, x, y); final Atom hasPartEDBXY = Expressions.makeAtom(hasPartEDB, x, y); final Rule rule3 = Expressions.makeRule(hasPartIDBXY, hasPartEDBXY); - // isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . + /* isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . */ final Atom isPartOfIDBXY = Expressions.makeAtom(isPartOfIDB, x, y); final Atom isPartOfEDBXY = Expressions.makeAtom(isPartOfEDB, x, y); final Rule rule4 = Expressions.makeRule(isPartOfIDBXY, isPartOfEDBXY); - // HasPartIDB(?x, !y), WheelIDB(!y) :- BicycleIDB(?x) . + /* + * HasPartIDB(?x, ?y), WheelIDB(?y) :- BicycleIDB(?x) . y - existential variable + */ final Atom wheelIDBY = Expressions.makeAtom(wheelIDB, y); final Rule rule5 = Expressions.makeRule(Expressions.makeConjunction(hasPartIDBXY, wheelIDBY), Expressions.makeConjunction(bicycleIDBX)); - // IsPartOfIDB(?x, !y), BicycleIDB(!y) :- WheelIDB(?x) . 
+ /* + * IsPartOfIDB(?x, ?y), BicycleIDB(?y) :- WheelIDB(?x) . y - existential + * variable + */ final Atom bycicleIDBY = Expressions.makeAtom(bicycleIDB, y); final Rule rule6 = Expressions.makeRule(Expressions.makeConjunction(isPartOfIDBXY, bycicleIDBY), Expressions.makeConjunction(wheelIDBX)); - // IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . + /* IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . */ final Atom hasPartIDBYX = Expressions.makeAtom(hasPartIDB, y, x); final Rule rule7 = Expressions.makeRule(isPartOfIDBXY, hasPartIDBYX); - // HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . + /* HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . */ final Atom isPartOfIDBYX = Expressions.makeAtom(isPartOfIDB, y, x); final Rule rule8 = Expressions.makeRule(hasPartIDBXY, isPartOfIDBYX); - // BicycleEDB(bicycle1) . + /* BicycleEDB(bicycle1) . */ final Atom fact1 = Expressions.makeAtom(bicycleEDB, bicycle1); - // HasPartEDB(bicycle1, wheel1) . + /* HasPartEDB(bicycle1, wheel1) . */ final Atom fact2 = Expressions.makeAtom(hasPartEDB, bicycle1, wheel1); - // Wheel(wheel1) . + /* Wheel(wheel1) . */ final Atom fact3 = Expressions.makeAtom(wheelEDB, wheel1); - // BicycleEDB(b) . + /* BicycleEDB(b) . */ final Atom fact4 = Expressions.makeAtom(bicycleEDB, bicycle2); - // 2. Loading, reasoning, and querying. - // Use try-with resources, or remember to call close() to free the reasoner - // resources. + /* + * 2. Loading, reasoning, and querying. Use try-with resources, or remember to + * call close() to free the reasoner resources. + */ try (Reasoner reasoner = Reasoner.getInstance()) { reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); reasoner.addFacts(fact1, fact2, fact3, fact4); reasoner.load(); + /* See that there is no fact HasPartIDB before reasoning. 
*/ System.out.println("Answers to query " + hasPartIDBXY + " before reasoning:"); printOutQueryAnswers(hasPartIDBXY, reasoner); + /* + * As the Skolem Chase is known not to terminate for this set of rules and + * facts, it is interrupted after one second. + */ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); reasoner.setReasoningTimeout(1); System.out.println("Starting Skolem Chase with 1 second timeout."); + + /* Indeed, the Skolem Chase did not terminate before timeout. */ boolean skolemChaseFinished = reasoner.reason(); System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); + + /* + * See that the Skolem Chase generated a very large number of facts in 1 second, + * extensively introducing new unnamed individuals to satisfy existential + * restrictions. + */ System.out.println( "Answers to query " + hasPartIDBXY + " after reasoning with the Skolem Chase for 1 second:"); printOutQueryAnswers(hasPartIDBXY, reasoner); + /* + * We reset the reasoner and apply the Restricted Chase on the same set of rules + * and facts + */ System.out.println(); System.out.println("Reseting reasoner; discarding facts generated during reasoning."); reasoner.resetReasoner(); reasoner.load(); + /* + * See that there is no fact HasPartIDB before reasoning. All inferred facts + * have been discarded when the reasoner was reset. + */ System.out.println("Answers to query " + hasPartIDBXY + " before reasoning:"); printOutQueryAnswers(hasPartIDBXY, reasoner); + /* + * As the Restricted Chase is known to terminate for this set of rules and + * facts, we will not interrupt it. 
+ */ reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setReasoningTimeout(null); + long restrictedChaseStartTime = System.currentTimeMillis(); System.out.println("Starting Restricted Chase with no timeout."); + + /* Indeed, the Restricted Chase did terminate (in less than 1 second) */ boolean restrictedChaseFinished = reasoner.reason(); - System.out.println("Has Restricted Chase algorithm finished? " + restrictedChaseFinished); + long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; + System.out.println("Has Restricted Chase algorithm finished? " + restrictedChaseFinished + ". (Duration: " + + restrictedChaseDuration + " ms)"); + + /* + * See that the Restricted Chase generated a small number of facts, reusing + * individuals that satisfy existential restrictions. + */ System.out.println("Answers to query " + hasPartIDBXY + " after reasoning with the Restricted Chase:"); printOutQueryAnswers(hasPartIDBXY, reasoner); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java index 3371e9dc1..a18ac5b4a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRDFModel.java @@ -137,9 +137,9 @@ public static void main(String[] args) throws IOException, RDFParseException, RD varOganizationName); /* - * hasOrganizationName(person, organizationName) :- TRIPLE(person,, affiliation), - * TRIPLE(affiliation, , organization), - * TRIPLE(organization, , organizationName) + * hasOrganizationName(?person, ?organizationName) :- TRIPLE(?person, , ?affiliation), + * TRIPLE(?affiliation, , ?organization), + * TRIPLE(?organization, , ?organizationName) . 
*/ final Rule organizationRule = Expressions.makeRule(creatorOrganizationName, personHasAffiliation, affiliationWithOrganization, organizationHasName); @@ -162,11 +162,13 @@ public static void main(String[] args) throws IOException, RDFParseException, RD /* We query for persons whose organization name is "TU Dresden" . */ final Constant constantTuDresdenOrganization = Expressions.makeConstant("\"TU Dresden\""); + /* hasOrganizationName(?person, "TU Dresden") */ @NonNull final Atom queryTUDresdenParticipantsAtISWC = Expressions.makeAtom(predicateHasOrganizationName, varPerson, constantTuDresdenOrganization); System.out.println("Participants at ISWC'16 and '17 from Organization 'TU Dresden':"); + System.out.println("( Answers to query " + queryTUDresdenParticipantsAtISWC + " )"); try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC, false)) { queryResultIterator.forEachRemaining(answer -> System.out From ad6bb3bf00cd2357397605427bbdfb828a93b5dc Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 23 Jul 2018 14:42:24 +0200 Subject: [PATCH 0024/1255] change packaging of vlog4j-emaples from 'pom' to 'jar' --- vlog4j-examples/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 9568a950d..76e2fb9c9 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -11,7 +11,7 @@ vlog4j-examples - pom + jar VLog4j Examples Contains examples and usage instructions describing the basic functionality of VLog4j From e163a52cc576438c12b872d499a85c150d9b97e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Thu, 26 Jul 2018 17:52:07 +0200 Subject: [PATCH 0025/1255] updated link to JavaDoc --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4808d64fb..ee7a111f4 100644 --- a/README.md +++ b/README.md @@ -34,4 +34,4 @@ Documentation ------------- * The module 
**vlog4j-examples** includes short example programs that demonstrate some common use cases -* [JavaDoc](https://mkroetzsch.github.io/vlog4j/) is available online and through the Maven packages. +* [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. From fe4a665f429a7bc1cc6b25a63f75b5717dea843f Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 7 Aug 2018 15:27:35 +0200 Subject: [PATCH 0026/1255] added example on setting log files and log levels for the internal reasoner logging --- .../examples/ExportReasonerLoggingToFile.java | 98 ++++++++++++++++ .../src/main/logs/ReasonerDebugLogFile.log | 111 ++++++++++++++++++ .../src/main/logs/ReasonerInfoLogFile.log | 4 + .../src/main/logs/ReasonerWarningLogFile.log | 4 + 4 files changed, 217 insertions(+) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExportReasonerLoggingToFile.java create mode 100644 vlog4j-examples/src/main/logs/ReasonerDebugLogFile.log create mode 100644 vlog4j-examples/src/main/logs/ReasonerInfoLogFile.log create mode 100644 vlog4j-examples/src/main/logs/ReasonerWarningLogFile.log diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExportReasonerLoggingToFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExportReasonerLoggingToFile.java new file mode 100644 index 000000000..754f25b97 --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExportReasonerLoggingToFile.java @@ -0,0 +1,98 @@ +package org.semanticweb.vlog4j.examples; + +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeAtom; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConjunction; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; +import static org.semanticweb.vlog4j.core.model.implementation.Expressions.makeRule; +import static 
org.semanticweb.vlog4j.core.model.implementation.Expressions.makeVariable; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import org.eclipse.jdt.annotation.NonNull; +import org.eclipse.jdt.annotation.Nullable; +import org.semanticweb.vlog4j.core.model.api.Atom; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; + +/** + * This class exemplifies setting a log file and log level for VLog reasoner logging information (like materialisation duration, number of iterations, number of + * derivations). + * - * @throws IOException if I/O exceptions occur during - * reasoning. - * @throws ReasonerStateException if this method is called before - * loading ({@link Reasoner#load()} - * or after closing - * ({@link Reasoner#close()}). - * @throws IncompatiblePredicateArityException - * @throws EdbIdbSeparationException + * @throws IOException if I/O exceptions occur during reasoning. */ - boolean reason() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException; + boolean reason() throws IOException; // TODO add examples to query javadoc /** @@ -394,18 +343,9 @@ boolean reason() * results. Otherwise, the query results will only contain * the facts with terms of type {@link TermType#CONSTANT} * (representing named individuals). - * @return an {@link AutoCloseable} iterator for {@link QueryResult}s, - * representing distinct answers to the query. - * @throws ReasonerStateException if this method is called before loading - * ({@link Reasoner#load()} or after closing - * ({@link Reasoner#close()}). 
- * - * @throws IllegalArgumentException if the given {@code query} contains terms - * ({@link Atom#getTerms()}) which are not of - * type {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}. + * @return QueryResultIterator that represents distinct answers to the query. */ - QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) throws ReasonerStateException; + QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks); // TODO add examples to query javadoc /** @@ -444,35 +384,20 @@ boolean reason() * the facts with terms of type {@link TermType#CONSTANT} * (representing named individuals). * - * @throws ReasonerStateException if this method is called before loading - * ({@link Reasoner#load()} or after closing - * ({@link Reasoner#close()}). - * @throws IOException if an I/O error occurs regarding given file - * ({@code csvFilePath)}. - * @throws IllegalArgumentException - *
    - *
  • if the given {@code queryAtom} contains - * terms ({@link Atom#getTerms()}) which are - * not of type {@link TermType#CONSTANT} or - * {@link TermType#VARIABLE}.
  • - *
  • if the given {@code csvFilePath} does - * not end with .csv - * extension.
  • - *
+ * @throws IOException if an I/O error occurs regarding given file + * ({@code csvFilePath)}. */ // TODO update javadoc with return type MaterialisationState exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) - throws ReasonerStateException, IOException; + throws IOException; /** * Resets the reasoner to a pre-loading state (before the call of * {@link #load()} method). All facts inferred by reasoning are discarded. Rules * and facts added to the reasoner need to be loaded again, to be able to * perform querying and reasoning. - * - * @throws ReasonerStateException if the method is called on a closed reasoner. */ - void resetReasoner() throws ReasonerStateException; + void resetReasoner(); // TODO Map exportDBToDir(File location); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index ccde18e68..56d915054 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -12,7 +12,6 @@ import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; @@ -271,7 +270,7 @@ public KnowledgeBase getKnowledgeBase() { @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); - warnClosed(); + validateNotClosed(); this.algorithm = algorithm; } @@ -282,7 +281,7 @@ public Algorithm getAlgorithm() { @Override public void setReasoningTimeout(Integer seconds) { - warnClosed(); + validateNotClosed(); 
if (seconds != null) { Validate.isTrue(seconds > 0, "Only strictly positive timeout period alowed!", seconds); } @@ -296,7 +295,7 @@ public Integer getReasoningTimeout() { @Override public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - warnClosed(); + validateNotClosed(); Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); this.ruleRewriteStrategy = ruleRewritingStrategy; } @@ -307,7 +306,7 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { } @Override - public void load() throws IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void load() throws IOException { validateNotClosed(); final LoadKbVisitor visitor = new LoadKbVisitor(); @@ -362,6 +361,13 @@ String getDataSourceConfigurationString() { return configStringBuilder.toString(); } + /** + * Checks if the loaded external data sources do in fact contain data of the + * correct arity. + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { for (final Predicate predicate : edbPredicates.keySet()) { validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); @@ -372,6 +378,16 @@ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityExcep } } + /** + * Checks if the loaded external data for a given source does in fact contain + * data of the correct arity for the given predidate. 
+ * + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { if (dataSource == null) @@ -430,8 +446,7 @@ void loadRules() { } @Override - public boolean reason() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public boolean reason() throws IOException { switch (this.reasonerState) { case BEFORE_LOADING: load(); @@ -470,6 +485,8 @@ private void runChase() { } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { + // FIXME: the message generate here is not guaranteed to be the correct + // interpretation of the exception that is caught throw new RuntimeException( "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); @@ -477,7 +494,7 @@ private void runChase() { } @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) throws ReasonerStateException { + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); @@ -502,7 +519,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla @Override public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) throws ReasonerStateException, IOException { + final boolean includeBlanks) throws IOException { validateNotClosed(); if (this.reasonerState 
== ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); @@ -525,7 +542,7 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, } @Override - public void resetReasoner() throws ReasonerStateException { + public void resetReasoner() { validateNotClosed(); // TODO what should happen to the KB? this.reasonerState = ReasonerState.BEFORE_LOADING; @@ -541,7 +558,7 @@ public void close() { } @Override - public void setLogLevel(LogLevel logLevel) throws ReasonerStateException { + public void setLogLevel(LogLevel logLevel) { validateNotClosed(); Validate.notNull(logLevel, "Log level cannot be null!"); this.internalLogLevel = logLevel; @@ -554,61 +571,66 @@ public LogLevel getLogLevel() { } @Override - public void setLogFile(String filePath) throws ReasonerStateException { + public void setLogFile(String filePath) { validateNotClosed(); this.vLog.setLogFile(filePath); } @Override - public boolean isJA() throws ReasonerStateException, NotStartedException { + public boolean isJA() { return checkAcyclicity(AcyclicityNotion.JA); } @Override - public boolean isRJA() throws ReasonerStateException, NotStartedException { + public boolean isRJA() { return checkAcyclicity(AcyclicityNotion.RJA); } @Override - public boolean isMFA() throws ReasonerStateException, NotStartedException { + public boolean isMFA() { return checkAcyclicity(AcyclicityNotion.MFA); } @Override - public boolean isRMFA() throws ReasonerStateException, NotStartedException { + public boolean isRMFA() { return checkAcyclicity(AcyclicityNotion.RMFA); } @Override - public boolean isMFC() throws ReasonerStateException, NotStartedException { + public boolean isMFC() { + validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } - final CyclicCheckResult checkCyclic = 
this.vLog.checkCyclic("MFC"); - if (checkCyclic.equals(CyclicCheckResult.CYCLIC)) { - return true; + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic("MFC"); + } catch (NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible } - return false; + return checkCyclic.equals(CyclicCheckResult.CYCLIC); } - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) - throws ReasonerStateException, NotStartedException { + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); if (this.reasonerState == ReasonerState.BEFORE_LOADING) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } - final CyclicCheckResult checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - if (checkCyclic.equals(CyclicCheckResult.NON_CYCLIC)) { - return true; + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible } - return false; + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); } @Override - public CyclicityResult checkForCycles() throws ReasonerStateException, NotStartedException { + public CyclicityResult checkForCycles() { final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); if (acyclic) { return CyclicityResult.ACYCLIC; @@ -684,13 +706,4 @@ void validateNotClosed() throws ReasonerStateException { } } - /** - * Check if reasoner is closed and log a warning if it is. 
- */ - void warnClosed() { - if (this.reasonerState == ReasonerState.AFTER_CLOSING) { - LOGGER.warn("Meaningless operation performed on a closed reasoner object."); - } - } - } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 161858d51..8ea46a921 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -32,9 +32,6 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -67,8 +64,7 @@ public class LoggingTest { // any time @Test - public void testSetLogFileNull() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testSetLogFileNull() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(null); reasoner.setLogLevel(LogLevel.INFO); @@ -80,8 +76,7 @@ public void testSetLogFileNull() } @Test - public void testSetLogFileInexistent() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testSetLogFileInexistent() throws IOException { final String inexistentFilePath = LOGS_FOLDER + "a/b"; try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -97,15 +92,14 @@ public void testSetLogFileInexistent() } @Test(expected = NullPointerException.class) - public void testSetLogLevelNull() throws ReasonerStateException { + public void 
testSetLogLevelNull() { try (final Reasoner instance = Reasoner.getInstance()) { instance.setLogLevel(null); } } @Test - public void testSetLogFileAppendsToFile() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testSetLogFileAppendsToFile() throws IOException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; @@ -130,8 +124,7 @@ public void testSetLogFileAppendsToFile() } @Test - public void testLogLevelInfo() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testLogLevelInfo() throws IOException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); @@ -150,8 +143,7 @@ public void testLogLevelInfo() } @Test - public void testLogLevelDebug() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testLogLevelDebug() throws IOException { final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index 6706b5845..d56aa3380 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -38,9 +38,6 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.Timeout; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import 
org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -118,13 +115,12 @@ public static void setUpBeforeClass() { } @Before - public void setUp() throws ReasonerStateException { + public void setUp() { this.reasoner = new VLogReasoner(kb); } @Test - public void skolem() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void skolem() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); @@ -134,8 +130,7 @@ public void skolem() } @Test - public void restricted() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void restricted() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); @@ -145,8 +140,7 @@ public void restricted() } @Test - public void skolemAfterLoad() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void skolemAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); this.reasoner.load(); @@ -157,8 +151,7 @@ public void skolemAfterLoad() } @Test - public void restrictedAfterLoad() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public void restrictedAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); this.reasoner.load(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 8e1ecc788..a53a28fa2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -30,8 +30,6 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; @@ -42,15 +40,12 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class AddDataSourceTest { private static final String CSV_FILE_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFacts.csv"; @Test - public void testAddDataSourceExistentDataForDifferentPredicates() throws ReasonerStateException, - EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testAddDataSourceExistentDataForDifferentPredicates() throws IOException { final Predicate predicateParity1 = Expressions.makePredicate("p", 1); final Constant constantA = Expressions.makeConstant("a"); final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); @@ -80,8 +75,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws Reasone } @Test - public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testAddDataSourceBeforeLoading() throws IOException { final Predicate 
predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -98,8 +92,7 @@ public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbI // TODO rewrite test @Ignore @Test(expected = ReasonerStateException.class) - public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testAddDataSourceAfterLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -116,8 +109,7 @@ public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbId // TODO rewrite test @Ignore @Test(expected = ReasonerStateException.class) - public void testAddDataSourceAfterReasoning() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testAddDataSourceAfterReasoning() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -132,11 +124,11 @@ public void testAddDataSourceAfterReasoning() throws ReasonerStateException, Edb } } - //FIXME decide how to handle datasources with multiple predicates + // FIXME decide how to handle datasources with multiple predicates @Ignore // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws ReasonerStateException, IOException { + public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws 
IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); @@ -144,12 +136,12 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws Reasoner kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } - - //FIXME decide how to handle datasources with multiple predicates + + // FIXME decide how to handle datasources with multiple predicates @Ignore // TODO move to a test class for KnowledgeBase @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException, IOException { + public void testAddDataSourceNoFactsForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 030e6db11..90b6fa877 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -34,9 +34,6 @@ import org.junit.Assert; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import 
org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -51,13 +48,10 @@ import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import karmaresearch.vlog.EDBConfigurationException; - public class AnswerQueryTest { @Test - public void testEDBQuerySameConstantSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOException { final String predicate = "p"; final Constant constantC = Expressions.makeConstant("c"); final Constant constantD = Expressions.makeConstant("d"); @@ -101,8 +95,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() } @Test - public void testIDBQuerySameBlankSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testIDBQuerySameBlankSubstitutesSameVariableName() throws IOException { final String predicate = "p"; final Variable x = Expressions.makeVariable("X"); final Variable y = Expressions.makeVariable("Y"); @@ -144,8 +137,7 @@ public void testIDBQuerySameBlankSubstitutesSameVariableName() } @Test - public void testIDBQuerySameIndividualSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testIDBQuerySameIndividualSubstitutesSameVariableName() throws IOException { final String predicate = "p"; final Variable x = Expressions.makeVariable("X"); final Variable y = Expressions.makeVariable("Y"); @@ -216,8 +208,7 @@ public void testIDBQuerySameIndividualSubstitutesSameVariableName() } @Test - public void queryResultWithBlanks() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void queryResultWithBlanks() throws IOException { final Variable 
vx = Expressions.makeVariable("x"); final Variable vy = Expressions.makeVariable("y"); // P(x) -> Q(y) @@ -251,8 +242,7 @@ public void queryResultWithBlanks() } @Test(expected = IllegalArgumentException.class) - public void queryEmptyKnowledgeBaseBeforeReasoning() - throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyKnowledgeBaseBeforeReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -264,8 +254,7 @@ public void queryEmptyKnowledgeBaseBeforeReasoning() } @Test(expected = IllegalArgumentException.class) - public void queryEmptyKnowledgeBaseAfterReasoning() - throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyKnowledgeBaseAfterReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -279,8 +268,7 @@ public void queryEmptyKnowledgeBaseAfterReasoning() } @Test - public void queryEmptyRules() - throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); final Fact fact = Expressions.makeFact("P", Arrays.asList(Expressions.makeConstant("c"))); kb.addStatement(fact); @@ -302,8 +290,7 @@ public void queryEmptyRules() } @Test - public void queryEmptyFacts() throws EDBConfigurationException, IOException, EdbIdbSeparationException, - ReasonerStateException, IncompatiblePredicateArityException { + public void queryEmptyFacts() throws IOException { final Variable vx = Expressions.makeVariable("x"); final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx), Expressions.makePositiveLiteral("p", vx)); @@ -319,7 +306,7 @@ public void queryEmptyFacts() throws 
EDBConfigurationException, IOException, Edb Assert.assertFalse(queryResultIterator.hasNext()); queryResultIterator.close(); } - + final PositiveLiteral queryAtom2 = Expressions.makePositiveLiteral("q", Expressions.makeVariable("?x")); try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom2, true)) { Assert.assertFalse(queryResultIterator.hasNext()); @@ -331,7 +318,7 @@ public void queryEmptyFacts() throws EDBConfigurationException, IOException, Edb try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom1, true)) { assertFalse(queryResultIteratorAfterReason.hasNext()); } - + try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom2, true)) { assertFalse(queryResultIteratorAfterReason.hasNext()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index ab02fdac0..7898e0e4e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -33,9 +33,6 @@ import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVParser; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -102,8 +99,7 @@ public static void testConstructor(final FileDataSource fileDataSource, final Fi } public static void testLoadEmptyFile(final Predicate predicate, 
final PositiveLiteral queryAtom, - final FileDataSource emptyFileDataSource) - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + final FileDataSource emptyFileDataSource) throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(predicate, emptyFileDataSource)); @@ -122,8 +118,7 @@ public static void testLoadEmptyFile(final Predicate predicate, final PositiveLi } } - public static void testNoFactsOverPredicate(final Reasoner reasoner, final PositiveLiteral queryAtom) - throws ReasonerStateException { + public static void testNoFactsOverPredicate(final Reasoner reasoner, final PositiveLiteral queryAtom) { try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, true)) { assertFalse(answerQuery.hasNext()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 402568bb0..fc21e98f8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -30,9 +30,6 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -77,8 +74,7 @@ public class GeneratedAnonymousIndividualsTest { } @Test - public void testBlanksSkolemChaseNoRuleRewrite() - 
throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksSkolemChaseNoRuleRewrite() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); @@ -94,8 +90,7 @@ public void testBlanksSkolemChaseNoRuleRewrite() } @Test - public void testBlanksSkolemChaseSplitHeadPieces() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksSkolemChaseSplitHeadPieces() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); @@ -114,8 +109,7 @@ public void testBlanksSkolemChaseSplitHeadPieces() } @Test - public void testBlanksRestrictedChaseNoRuleRewrite() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksRestrictedChaseNoRuleRewrite() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); @@ -131,8 +125,7 @@ public void testBlanksRestrictedChaseNoRuleRewrite() } @Test - public void testBlanksRestrictedChaseSplitHeadPieces() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testBlanksRestrictedChaseSplitHeadPieces() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { @@ -164,8 +157,7 @@ public void testBlanksRestrictedChaseSplitHeadPieces() } } - private void checkTwoDistinctBlanksGenerated(final Reasoner reasoner) - throws ReasonerStateException, IOException, EdbIdbSeparationException { + private void checkTwoDistinctBlanksGenerated(final Reasoner reasoner) throws IOException { // expected facts: P(c, _:b1), P(c, _:b2) final List> csvContentIncludeBlanks = FileDataSourceTestUtils.getCSVContent(includeBlanksFilePath); 
assertTrue(csvContentIncludeBlanks.size() == 2); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 8a0764a37..208fcd4c0 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -34,8 +34,6 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -83,8 +81,7 @@ public void testSetReasoningTimeout() { // FIXME update test @Ignore @Test(expected = ReasonerStateException.class) - public void testAddRules1() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testAddRules1() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.getKnowledgeBase().addStatement(ruleQxPx); reasoner.load(); @@ -92,8 +89,7 @@ public void testAddRules1() } @Test - public void testAddRules2() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testAddRules2() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(ruleQxPx); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -114,8 +110,7 @@ public void testAddRules3() { // FIXME update test @Ignore @Test - public void testAddFacts1() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void 
testAddFacts1() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(factPc); @@ -125,8 +120,7 @@ public void testAddFacts1() } @Test(expected = NullPointerException.class) - public void testAddFacts2() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testAddFacts2() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); final List facts = new ArrayList<>(); @@ -140,30 +134,28 @@ public void testAddFacts2() } @Test - public void testResetBeforeLoad() throws ReasonerStateException { + public void testResetBeforeLoad() { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.resetReasoner(); } } @Test(expected = NullPointerException.class) - public void setRuleRewriteStrategy1() throws ReasonerStateException { + public void setRuleRewriteStrategy1() { try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.setRuleRewriteStrategy(null); } } @Test - public void setRuleRewriteStrategy3() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void setRuleRewriteStrategy3() { try (final Reasoner reasoner = Reasoner.getInstance();) { reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); } } @Test - public void testResetDiscardInferences() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testResetDiscardInferences() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(ruleQxPx, factPc); @@ -198,8 +190,7 @@ public void testResetDiscardInferences() } @Test - public void testResetKeepExplicitDatabase() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testResetKeepExplicitDatabase() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(ruleQxPx); // assert p(c) @@ 
-229,8 +220,7 @@ public void testResetKeepExplicitDatabase() } } - private void checkExplicitFacts(final Reasoner reasoner, final Predicate predicateR1) - throws ReasonerStateException { + private void checkExplicitFacts(final Reasoner reasoner, final Predicate predicateR1) { try (final QueryResultIterator queryResultIteratorPx = reasoner.answerQuery(ruleBodyPx, true)) { assertTrue(queryResultIteratorPx.hasNext()); assertEquals(factPc.getTerms(), queryResultIteratorPx.next().getTerms()); @@ -245,8 +235,7 @@ private void checkExplicitFacts(final Reasoner reasoner, final Predicate predica } @Test - public void testResetEmptyKnowledgeBase() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testResetEmptyKnowledgeBase() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -266,24 +255,15 @@ public void testResetEmptyKnowledgeBase() } } - @Test - public void testFailReasonBeforeLoad() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.reason(); - } - - } - @Test(expected = ReasonerStateException.class) - public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { + public void testFailAnswerQueryBeforeLoad() { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) - public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { + public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); @@ -291,8 +271,7 @@ public void testFailExportQueryAnswerToCsvBeforeLoad() 
throws ReasonerStateExcep } @Test - public void testSuccessiveCloseAfterLoad() - throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void testSuccessiveCloseAfterLoad() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { reasoner.load(); reasoner.close(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index a0f57bef4..aa4ec6a9f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -30,18 +30,16 @@ import org.apache.commons.lang3.StringUtils; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; public class SparqlQueryResultDataSourceTest { - + final URL endpoint = new URL("http://query.wikidata.org/sparql"); - - public SparqlQueryResultDataSourceTest() throws MalformedURLException {} - + + public SparqlQueryResultDataSourceTest() throws MalformedURLException { + } + @Test public void testToStringSimpleConstructor() throws MalformedURLException { final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, "b,a", @@ -65,17 +63,15 @@ public void testToStringList() throws MalformedURLException { } @Test(expected = IllegalArgumentException.class) - public void testEmptyQueryBodyList() - throws ReasonerStateException, EdbIdbSeparationException, 
IOException, IncompatiblePredicateArityException { - + public void testEmptyQueryBodyList() throws IOException { + final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("a"))); new SparqlQueryResultDataSource(endpoint, queryVariables, StringUtils.SPACE); } @Test(expected = IllegalArgumentException.class) - public void testEmptyQueryBody() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testEmptyQueryBody() throws IOException { new SparqlQueryResultDataSource(endpoint, "a", StringUtils.SPACE); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java index fb0b13a35..7be7634e2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java @@ -30,9 +30,6 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -42,8 +39,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class VLogReasonerBasics { final String constantNameC = "c"; @@ -61,8 +56,7 @@ public class VLogReasonerBasics { final Rule ruleCxBx = Expressions.makeRule(atomCx, atomBx); @Test - public void testCloseRepeatedly() - throws 
EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void testCloseRepeatedly() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.close(); } @@ -75,8 +69,7 @@ public void testCloseRepeatedly() } @Test - public void testLoadRules() - throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException, ReasonerStateException { + public void testLoadRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx); @@ -86,8 +79,7 @@ public void testLoadRules() } @Test - public void testSimpleInference() throws EDBConfigurationException, IOException, ReasonerStateException, - EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testSimpleInference() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 18d1b81f6..b26d50f03 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -31,9 +31,6 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -75,8 +72,7 @@ public 
VLogReasonerCombinedInputs() throws IOException { } @Test - public void samePredicateSourceFactRule() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateSourceFactRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(qFromCsv, factQc, factPd, rulePimpliesQ); @@ -90,8 +86,7 @@ public void samePredicateSourceFactRule() } @Test - public void samePredicateFactSourceRule() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateFactSourceRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(factQc, factPd, qFromCsv, rulePimpliesQ); @@ -105,8 +100,7 @@ public void samePredicateFactSourceRule() } @Test - public void samePredicateRuleFactSource() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateRuleFactSource() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(rulePimpliesQ, factQc, factPd, qFromCsv); @@ -120,8 +114,7 @@ public void samePredicateRuleFactSource() } @Test - public void samePredicateSourceSource() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateSourceSource() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(qFromCsv, qCDFromCsv); @@ -135,8 +128,7 @@ public void samePredicateSourceSource() } @Test - public void samePredicateSourceFactFact() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateSourceFactFact() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(qFromCsv, factQc, factQd); @@ -150,8 +142,7 @@ public void samePredicateSourceFactFact() } @Test - public 
void samePredicateFactsRule() - throws IOException, ReasonerStateException, IncompatiblePredicateArityException, EdbIdbSeparationException { + public void samePredicateFactsRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(factPd, factQc, factQc1, factQc2, rulePimpliesQ); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java index 36555fda1..b9e5580e6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -34,9 +34,7 @@ import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; @@ -44,8 +42,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class VLogReasonerCsvInput { private static final Predicate unaryPredicate1 = Expressions.makePredicate("p", 1); @@ -59,8 +55,7 @@ public class VLogReasonerCsvInput { // FIXME: test ignored because of a bug in VLog. Remore the @Ignore annotation // after bug is fixed. 
@Test - public void testLoadEmptyCsvFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadEmptyCsvFile() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, makeVariable("x")); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, @@ -70,17 +65,14 @@ public void testLoadEmptyCsvFile() } @Test - public void testLoadUnaryFactsFromCsvFile() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testLoadUnaryFactsFromCsvFile() throws IOException { testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz"))); } - private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) - throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, - IncompatiblePredicateArityException { + private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate2, fileDataSource)); @@ -110,8 +102,7 @@ private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource file * @throws IncompatiblePredicateArityException */ @Test(expected = IOException.class) - public void testLoadNonexistingCsvFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadNonexistingCsvFile() throws 
IOException { final File nonexistingFile = new File("nonexistingFile.csv"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile); @@ -124,8 +115,7 @@ public void testLoadNonexistingCsvFile() } @Test(expected = IncompatiblePredicateArityException.class) - public void testLoadCsvFileWrongArity() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadCsvFileWrongArity() throws IOException { final FileDataSource fileDataSource = new CsvFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); final KnowledgeBase kb = new KnowledgeBase(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java index e116aefda..34531cc3b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -28,9 +28,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -41,8 +38,7 @@ public class VLogReasonerCsvOutput { @Test - public void testEDBQuerySameConstantSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testEDBQuerySameConstantSubstitutesSameVariableName() 
throws IOException { final String predicate = "p"; final Constant constantC = Expressions.makeConstant("c"); final Constant constantD = Expressions.makeConstant("d"); @@ -90,8 +86,7 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() } @Test(expected = IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); @@ -106,8 +101,7 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() } @Test(expected = IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); @@ -123,8 +117,7 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() } @Test(expected = IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); @@ -141,8 +134,7 @@ public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() } @Test(expected = 
IllegalArgumentException.class) - public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java index ebd8be56c..801556108 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java @@ -34,9 +34,6 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -49,8 +46,7 @@ public class VLogReasonerNegation { @Test(expected = RuntimeException.class) - public void testNotStratifiable() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { + public void testNotStratifiable() throws IOException { final Variable x = makeVariable("x"); final Variable y = makeVariable("y"); @@ -71,8 +67,7 @@ public void testNotStratifiable() } @Test - public void testStratifiable() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { + public void testStratifiable() 
throws IOException { final Variable x = makeVariable("x"); final Variable y = makeVariable("y"); @@ -107,8 +102,7 @@ public void testStratifiable() } @Test - public void testInputNegation() - throws EdbIdbSeparationException, IncompatiblePredicateArityException, ReasonerStateException, IOException { + public void testInputNegation() throws IOException { final Variable x = makeVariable("x"); final Variable y = makeVariable("y"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java index 264237ebb..3b72ab8cd 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -35,9 +35,6 @@ import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; @@ -45,8 +42,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import karmaresearch.vlog.EDBConfigurationException; - public class VLogReasonerRdfInput { private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); @@ -61,40 +56,36 @@ public class VLogReasonerRdfInput { makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Ignore - //TODO test fails for now, because of a VLog bug. 
Remove the @Ignore annotation after VLog bug is fixed. + // TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation + // after VLog bug is fixed. @Test - public void testLoadEmptyRdfFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadEmptyRdfFile() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt"))); } @Ignore - //TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation after VLog bug is fixed. + // TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation + // after VLog bug is fixed. @Test - public void testLoadEmptyRdfFileGz() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadEmptyRdfFileGz() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz"))); } @Test - public void testLoadTernaryFactsFromRdfFile() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testLoadTernaryFactsFromRdfFile() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt"))); } @Test - public void testLoadTernaryFactsFromRdfFileGz() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { + public void testLoadTernaryFactsFromRdfFileGz() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + 
".nt.gz"))); } - public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) - throws ReasonerStateException, EdbIdbSeparationException, EDBConfigurationException, IOException, - IncompatiblePredicateArityException { + public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException { final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); @@ -109,8 +100,7 @@ public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fil } @Test(expected = IOException.class) - public void testLoadNonexistingRdfFile() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadNonexistingRdfFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.nt"); assertFalse(nonexistingFile.exists()); final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); @@ -123,8 +113,7 @@ public void testLoadNonexistingRdfFile() } @Test - public void testLoadRdfInvalidFormat() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testLoadRdfInvalidFormat() throws IOException { final FileDataSource fileDataSource = new RdfFileDataSource(new File( FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); final KnowledgeBase kb = new KnowledgeBase(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java index b5a03c7e8..2922c48e4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -30,9 +30,7 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -53,8 +51,7 @@ public class VLogReasonerSparqlInput { */ @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQuery() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testSimpleSparqlQuery() throws IOException { final URL endpoint = new URL("http://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); @@ -79,8 +76,7 @@ public void testSimpleSparqlQuery() @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQueryHttps() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testSimpleSparqlQueryHttps() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); @@ -114,8 +110,7 @@ public void testSimpleSparqlQueryHttps() */ @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQuery2() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void 
testSimpleSparqlQuery2() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); @@ -138,8 +133,7 @@ public void testSimpleSparqlQuery2() @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test(expected = RuntimeException.class) - public void testConjunctiveQueryNewLineCharacterInQueryBody() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testConjunctiveQueryNewLineCharacterInQueryBody() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); @@ -159,8 +153,7 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testConjunctiveQuery() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testConjunctiveQuery() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); @@ -182,8 +175,7 @@ public void testConjunctiveQuery() } @Test(expected = IncompatiblePredicateArityException.class) - public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( 
Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index c20ff05c0..d72cc644a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -24,7 +24,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; @@ -87,8 +86,6 @@ public static void main(final String[] args) throws IOException { } System.out.println("Done."); - } catch (final VLog4jException e) { - System.out.println("The reasoner encountered a problem: " + e.getMessage()); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 2d14c2438..281f48a32 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -25,7 +25,6 @@ import java.util.Arrays; import java.util.List; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; @@ -90,9 +89,6 @@ public static void main(final String[] args) throws IOException { } System.out.println("\nDone."); - } catch (final VLog4jException e) { - System.out.println("The reasoner encountered a problem:" + e.getMessage()); - } } diff --git 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index b3750e0bf..fea0dbe82 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -22,7 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; @@ -92,8 +91,6 @@ public static void main(final String[] args) throws IOException { ExamplesUtils.printOutQueryAnswers("inEuropeOutsideGermany(?Org)", reasoner); System.out.println("Done."); - } catch (final VLog4jException e) { - System.out.println("Error: " + e.getMessage()); } } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 371fcc530..7af9eb473 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -22,9 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -54,8 +51,7 @@ */ public class AddDataFromCsvFile { - public static void main(final String[] args) throws 
EdbIdbSeparationException, IOException, ReasonerStateException, - IncompatiblePredicateArityException, ParsingException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index 48fda4f86..dd62026ce 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -22,9 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -61,8 +58,7 @@ */ public class AddDataFromRdfFile { - public static void main(final String[] args) throws EdbIdbSeparationException, IOException, ReasonerStateException, - IncompatiblePredicateArityException, ParsingException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); /* 1. Prepare rules and create some related vocabulary objects used later. 
*/ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 180931863..21b403dcb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -26,9 +26,6 @@ import java.util.LinkedHashSet; import java.util.List; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -77,8 +74,7 @@ public class AddDataFromSparqlQueryResults { */ private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22"; - public static void main(final String[] args) - throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 247b2007e..3ca012638 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -34,9 +34,6 @@ import org.eclipse.jdt.annotation.NonNull; import org.eclipse.jdt.annotation.Nullable; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import 
org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; @@ -95,8 +92,7 @@ public class ConfigureReasonerLogging { /* A(c,d) */ private static Fact fact = Expressions.makeFact("A_EDB", Arrays.asList(makeConstant("c"), makeConstant("d"))); - public static void main(final String[] args) - throws EdbIdbSeparationException, IncompatiblePredicateArityException, IOException, ReasonerStateException { + public static void main(final String[] args) throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index d07e4d6c5..256d211b5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -22,9 +22,6 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -45,8 +42,7 @@ */ public class SkolemVsRestrictedChaseTermination { - public static void main(final String[] args) throws ReasonerStateException, EdbIdbSeparationException, - IncompatiblePredicateArityException, 
IOException, ParsingException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index dc770022f..f429ff4a0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -28,9 +28,6 @@ import java.util.List; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; @@ -61,8 +58,7 @@ */ public class AddDataFromDlgpFile { - public static void main(final String[] args) - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { + public static void main(final String[] args) throws IOException { final List graalAtoms = new ArrayList<>(); final List graalRules = new ArrayList<>(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 4e9d31111..7d595dfad 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -24,9 +24,6 @@ import java.util.ArrayList; import java.util.List; -import 
org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; @@ -62,8 +59,7 @@ */ public class AddDataFromGraal { - public static void main(final String[] args) - throws ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) throws IOException { /* * 1. Instantiating rules */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 5c336ecd4..f203d401f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -27,9 +27,6 @@ import java.io.IOException; import java.net.URL; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -62,8 +59,7 @@ */ public class DoidExampleGraal { - public static void main(final String[] args) - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public static void main(final String[] args) throws IOException { ExamplesUtils.configureLogging(); diff --git 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 7beb9f6fb..3c3834942 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -29,9 +29,6 @@ import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -55,8 +52,7 @@ */ public class OwlOntologyToRulesAndFacts { - public static void main(final String[] args) throws OWLOntologyCreationException, ReasonerStateException, - EdbIdbSeparationException, IncompatiblePredicateArityException, IOException { + public static void main(final String[] args) throws OWLOntologyCreationException, IOException { /* Bike ontology is loaded from a Bike file using OWL API */ final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index 0d13eefe7..d5a81a085 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -37,9 +37,6 @@ import 
org.openrdf.rio.RDFParser; import org.openrdf.rio.Rio; import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -64,8 +61,8 @@ */ public class AddDataFromRdfModel { - public static void main(final String[] args) throws IOException, RDFParseException, RDFHandlerException, - URISyntaxException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public static void main(final String[] args) + throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { ExamplesUtils.configureLogging(); diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index 85c7f24b3..ea3dd7a46 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -40,9 +40,6 @@ import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -66,8 +63,7 @@ public class TestReasonOverRdfFacts { private static final Variable object = makeVariable("o"); @Test - public void 
testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandlerException, IOException, - ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToFacts(model); @@ -86,8 +82,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl } @Test - public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandlerException, IOException, - ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { + public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); final Set facts = RdfModelConverter.rdfModelToFacts(model); @@ -107,8 +102,7 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle } } - private Set> getQueryResults(final Reasoner reasoner, final PositiveLiteral query) - throws ReasonerStateException { + private Set> getQueryResults(final Reasoner reasoner, final PositiveLiteral query) { final QueryResultIterator queryResultIterator = reasoner.answerQuery(query, true); final Set> queryResults = new HashSet<>(); From cc1d2ae18ada7ba4082a8ab5288b3fe3cf4e457e Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 10:10:28 +0200 Subject: [PATCH 0407/1255] remove @Nullable annotation --- .../main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index a1c1cc72b..a47b50349 100644 
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -141,7 +141,6 @@ public static Reasoner getInstance() { * @return if not {@code null}, number of seconds after which the reasoning will * be interrupted, if it has not reached completion. */ - @Nullable Integer getReasoningTimeout(); /** From 66788a785cfac0bfb40041bf8d5c7b5ca67ea094 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 11:02:31 +0200 Subject: [PATCH 0408/1255] test reset reasoning timeout to null --- .../vlog4j/core/reasoner/Reasoner.java | 1 - .../core/reasoner/ReasonerTimeoutTest.java | 60 +++++++++++++------ 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index a47b50349..ff4654864 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -2,7 +2,6 @@ import java.io.IOException; -import org.eclipse.jdt.annotation.Nullable; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index d56aa3380..f93ca6b10 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static 
org.semanticweb.vlog4j.core.model.implementation.Expressions.makeConstant; /*- @@ -75,12 +77,23 @@ public class ReasonerTimeoutTest { private final static KnowledgeBase kb = new KnowledgeBase(); /** - * The timeout after which reasoning should be completed. One second is added to - * account for setup and tear down of reasoning resources. + * The timeout after which reasoning should be completed. */ @org.junit.Rule - public Timeout globalTimeout = Timeout.seconds(timeout + 1); - + public Timeout globalTimeout = Timeout.seconds(timeout * 5); + + private final static Predicate infinite_EDB = makePredicate("infinite_EDB", 2); + private final static Predicate infinite_IDB = makePredicate("infinite_IDB", 2); + private final static Variable x = makeVariable("x"); + private final static Variable y = makeVariable("y"); + + private final static PositiveLiteral infinite_IDB_xy = makePositiveLiteral(infinite_IDB, x, y); + private final static PositiveLiteral infinite_EDB_xy = makePositiveLiteral(infinite_EDB, x, y); + private final static Variable z = makeVariable("z"); + + private final static PositiveLiteral infinite_IDB_yz = makePositiveLiteral(infinite_IDB, y, z); + private final static Rule infinite_rule = makeRule(infinite_IDB_yz, infinite_IDB_xy); + /** * This method provides the {@link #facts} and {@link #rules} to be used in all * test runs. 
To test if the timeout works as expected, a small set of facts and @@ -90,24 +103,13 @@ public class ReasonerTimeoutTest { */ @BeforeClass public static void setUpBeforeClass() { - final Predicate infinite_EDB = makePredicate("infinite_EDB", 2); - final Predicate infinite_IDB = makePredicate("infinite_IDB", 2); facts.add(makeFact(infinite_EDB, Arrays.asList(makeConstant("A"), makeConstant("B")))); - final Variable x = makeVariable("x"); - final Variable y = makeVariable("y"); - - final PositiveLiteral infinite_IDB_xy = makePositiveLiteral(infinite_IDB, x, y); - final PositiveLiteral infinite_EDB_xy = makePositiveLiteral(infinite_EDB, x, y); final Rule import_rule = makeRule(infinite_IDB_xy, infinite_EDB_xy); rules.add(import_rule); - final Variable z = makeVariable("z"); - - final PositiveLiteral infinite_IDB_yz = makePositiveLiteral(infinite_IDB, y, z); - final Rule infinite_rule = makeRule(infinite_IDB_yz, infinite_IDB_xy); rules.add(infinite_rule); kb.addStatements(rules); @@ -126,7 +128,7 @@ public void skolem() throws IOException { this.reasoner.load(); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); } @Test @@ -136,7 +138,7 @@ public void restricted() throws IOException { this.reasoner.load(); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); } @Test @@ -147,7 +149,7 @@ public void skolemAfterLoad() throws IOException { this.reasoner.setReasoningTimeout(timeout); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); } @Test @@ -158,7 +160,27 @@ public void restrictedAfterLoad() throws IOException { this.reasoner.setReasoningTimeout(timeout); - this.reasoner.reason(); + assertFalse(this.reasoner.reason()); + } + + @Test + public void resetReasoningTimeoutToNull() throws IOException { + this.reasoner.setReasoningTimeout(timeout); + + this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + this.reasoner.load(); + assertFalse(this.reasoner.reason()); + + this.reasoner.resetReasoner(); + + final PositiveLiteral 
blocking_IDB_yx = makePositiveLiteral(infinite_IDB, y, x); + final Rule blockingRule = makeRule(blocking_IDB_yx, infinite_IDB_xy); + kb.addStatement(blockingRule); + + + this.reasoner.setReasoningTimeout(null); + this.reasoner.load(); + assertTrue(this.reasoner.reason()); } @After From 203b883b5df373f83080eb4838e89a7cfaeb2f78 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 11:15:24 +0200 Subject: [PATCH 0409/1255] renamed reasoner states --- .../vlog4j/core/reasoner/ReasonerState.java | 19 ++++---- .../reasoner/implementation/VLogReasoner.java | 44 +++++++++---------- 2 files changed, 32 insertions(+), 31 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java index 4dbf79176..121b6b4d8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java @@ -30,29 +30,30 @@ public enum ReasonerState { /** * State a Reasoner is in before method {@link Reasoner#load()} has been called. - * The Reasoner cannot reason before it has been loaded. The Reasoner can only - * be loaded once. Reasoning and querying are not allowed in this state. + * Querying is not allowed in this state. */ - BEFORE_LOADING("before loading"), + KB_NOT_LOADED("knowledge base not loaded"), /** * State a Reasoner is in after method {@link Reasoner#load()} has been called, * and before method {@link Reasoner#reason()} has been called. The Reasoner can + * be queried. */ - AFTER_LOADING("loaded"), + KB_LOADED("knowledge base loaded"), /** * State a Reasoner is in after method {@link Reasoner#reason()} has been + * called. */ - AFTER_REASONING("after reasoning"), + MATERIALISED("after reasoning"), /** * State in which the knowledge base of an already loaded reasoner has been * changed. 
This can occur if the knowledge base has been modified after loading - * (in {@link ReasonerState#AFTER_LOADING} state), or after reasoning (in - * {@link ReasonerState#AFTER_REASONING} state). + * (in {@link ReasonerState#KB_LOADED} state), or after reasoning (in + * {@link ReasonerState#MATERIALISED} state). */ - KNOWLEDGE_BASE_CHANGED("knowledge base changed"), + KB_CHANGED("knowledge base changed"), /** * State a Reasoner is in after method {@link Reasoner#close()} has been called. * The Reasoner cannot reason again, once it reached this state. Loading and @@ -60,7 +61,7 @@ public enum ReasonerState { * adding rules, fact and fact data sources and setting the rule re-writing * strategy are not allowed in this state. */ - AFTER_CLOSING("closed"); + CLOSED("closed"); private final String name; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 56d915054..936fabe14 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -242,7 +242,7 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { final Map> directEdbFacts = new HashMap<>(); final Set rules = new HashSet<>(); - private ReasonerState reasonerState = ReasonerState.BEFORE_LOADING; + private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private MaterialisationState materialisationState = MaterialisationState.INCOMPLETE; private LogLevel internalLogLevel = LogLevel.WARNING; @@ -334,7 +334,7 @@ public void load() throws IOException { loadFacts(); loadRules(); - this.reasonerState = ReasonerState.AFTER_LOADING; + this.reasonerState = ReasonerState.KB_LOADED; } String getDataSourceConfigurationString() { @@ -448,28 +448,28 @@ void loadRules() { @Override public boolean 
reason() throws IOException { switch (this.reasonerState) { - case BEFORE_LOADING: + case KB_NOT_LOADED: load(); runChase(); break; - case AFTER_LOADING: + case KB_LOADED: runChase(); break; - case KNOWLEDGE_BASE_CHANGED: - case AFTER_REASONING: + case KB_CHANGED: + case MATERIALISED: resetReasoner(); load(); runChase(); break; - case AFTER_CLOSING: + case CLOSED: throw new ReasonerStateException(this.reasonerState, "Reasoning is not allowed after closing."); } return this.reasoningCompleted; } private void runChase() { - this.reasonerState = ReasonerState.AFTER_REASONING; + this.reasonerState = ReasonerState.MATERIALISED; final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; try { @@ -496,7 +496,7 @@ private void runChase() { @Override public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); @@ -521,7 +521,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); @@ -545,14 +545,14 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, public void resetReasoner() { validateNotClosed(); // TODO what should happen to the KB? 
- this.reasonerState = ReasonerState.BEFORE_LOADING; + this.reasonerState = ReasonerState.KB_NOT_LOADED; this.vLog.stop(); LOGGER.info("Reasoner has been reset. All inferences computed during reasoning have been discarded."); } @Override public void close() { - this.reasonerState = ReasonerState.AFTER_CLOSING; + this.reasonerState = ReasonerState.CLOSED; this.knowledgeBase.deleteListener(this); this.vLog.stop(); } @@ -599,7 +599,7 @@ public boolean isRMFA() { @Override public boolean isMFC() { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } @@ -615,7 +615,7 @@ public boolean isMFC() { private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { validateNotClosed(); - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "checking rules acyclicity is not allowed before loading!"); } @@ -662,18 +662,18 @@ public void onStatementAdded(Statement statementAdded) { } private void updateReasonerToKnowledgeBaseChanged() { - if (this.reasonerState.equals(ReasonerState.AFTER_LOADING) - || this.reasonerState.equals(ReasonerState.AFTER_REASONING)) { + if (this.reasonerState.equals(ReasonerState.KB_LOADED) + || this.reasonerState.equals(ReasonerState.MATERIALISED)) { - this.reasonerState = ReasonerState.KNOWLEDGE_BASE_CHANGED; + this.reasonerState = ReasonerState.KB_CHANGED; this.materialisationState = MaterialisationState.WRONG; } } // private void updateReasonerStateToKnowledgeBaseChanged() { -// if (this.reasonerState.equals(ReasonerState.AFTER_LOADING) -// || this.reasonerState.equals(ReasonerState.AFTER_REASONING)) { -// this.reasonerState = ReasonerState.KNOWLEDGE_BASE_CHANGED; +// if (this.reasonerState.equals(ReasonerState.KB_LOADED) +// || 
this.reasonerState.equals(ReasonerState.MATERIALISED)) { +// this.reasonerState = ReasonerState.KB_CHANGED; // } // } @@ -689,7 +689,7 @@ private void updateReasonerToKnowledgeBaseChanged() { // } // private void updateMaterialisationStateOnStatementsAdded(boolean materialisationInvalidated) { -// if (this.reasonerState.equals(ReasonerState.KNOWLEDGE_BASE_CHANGED) && materialisationInvalidated) { +// if (this.reasonerState.equals(ReasonerState.KB_CHANGED) && materialisationInvalidated) { // this.materialisationState = MaterialisationState.WRONG; // } // } @@ -700,7 +700,7 @@ private void updateReasonerToKnowledgeBaseChanged() { * @throws ReasonerStateException */ void validateNotClosed() throws ReasonerStateException { - if (this.reasonerState == ReasonerState.AFTER_CLOSING) { + if (this.reasonerState == ReasonerState.CLOSED) { LOGGER.error("Invalid operation requested on a closed reasoner object."); throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner."); } From d59dd602fa7ebdeb17d6507cad41da6d342afddf Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 16:09:01 +0200 Subject: [PATCH 0410/1255] loading in several states --- .../vlog4j/core/reasoner/ReasonerState.java | 8 +-- .../reasoner/implementation/VLogReasoner.java | 56 +++++++++++++------ .../VLogReasonerCombinedInputs.java | 10 ++-- 3 files changed, 50 insertions(+), 24 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java index 121b6b4d8..4fb79e735 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java @@ -56,10 +56,10 @@ public enum ReasonerState { KB_CHANGED("knowledge base changed"), /** * State a Reasoner is in after method {@link Reasoner#close()} has been called. 
- * The Reasoner cannot reason again, once it reached this state. Loading and - * setting the reasoning algorithm in this state are ineffective. Reasoning, - * adding rules, fact and fact data sources and setting the rule re-writing - * strategy are not allowed in this state. + * The Reasoner cannot reason again, once it reached this state. Loading, + * reasoning, adding rules, fact and fact data sources, setting the rule + * re-writing strategy, the reasoning algorithm and the reasoning timeout. are + * not allowed in this state. */ CLOSED("closed"); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 936fabe14..f402df282 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -260,6 +260,8 @@ public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; this.knowledgeBase.addListener(this); + + setLogLevel(this.internalLogLevel); } @Override @@ -308,7 +310,24 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { @Override public void load() throws IOException { validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; + } + } + void loadKnowledgeBase() throws IOException { final LoadKbVisitor visitor = new LoadKbVisitor(); visitor.clearIndexes(); for (final Statement statement : knowledgeBase) { @@ -326,8 +345,6 @@ public void load() throws IOException { } catch (final EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration.", e); } - // TODO: can't we set 
this earlier? Why here? - setLogLevel(this.internalLogLevel); validateDataSourcePredicateArities(); @@ -335,6 +352,9 @@ public void load() throws IOException { loadRules(); this.reasonerState = ReasonerState.KB_LOADED; + + //TODO: if there are no rules, then materialisation state is complete + this.materialisationState = MaterialisationState.INCOMPLETE; } String getDataSourceConfigurationString() { @@ -414,11 +434,11 @@ void loadFacts() { aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); } try { - String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate)); + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); + final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate)); this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { - for (String[] tuple : vLogPredicateTuples) { + for (final String[] tuple : vLogPredicateTuples) { LOGGER.debug( "Loaded direct fact " + vLogPredicateName + "(" + Arrays.deepToString(tuple) + ")"); } @@ -436,7 +456,7 @@ void loadRules() { try { this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); if (LOGGER.isDebugEnabled()) { - for (karmaresearch.vlog.Rule rule : vLogRuleArray) { + for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { LOGGER.debug("Loaded rule " + rule.toString()); } } @@ -447,6 +467,8 @@ void loadRules() { @Override public boolean reason() throws IOException { + validateNotClosed(); + switch (this.reasonerState) { case KB_NOT_LOADED: load(); @@ -455,16 +477,18 @@ public boolean reason() throws IOException { case KB_LOADED: runChase(); break; - case KB_CHANGED: - case MATERIALISED: resetReasoner(); load(); runChase(); break; - case CLOSED: - throw new ReasonerStateException(this.reasonerState, "Reasoning is not allowed after 
closing."); + case MATERIALISED: + runChase(); + break; + default: + break; } + return this.reasoningCompleted; } @@ -479,18 +503,19 @@ private void runChase() { } else { this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); } - this.materialisationState = this.reasoningCompleted ? MaterialisationState.COMPLETE - : MaterialisationState.INCOMPLETE; } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { - // FIXME: the message generate here is not guaranteed to be the correct + // FIXME: the message generated here is not guaranteed to be the correct // interpretation of the exception that is caught throw new RuntimeException( "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); } + + this.materialisationState = this.reasoningCompleted ? MaterialisationState.COMPLETE + : MaterialisationState.INCOMPLETE; } @Override @@ -544,7 +569,6 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, @Override public void resetReasoner() { validateNotClosed(); - // TODO what should happen to the KB? this.reasonerState = ReasonerState.KB_NOT_LOADED; this.vLog.stop(); LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); @@ -607,7 +631,7 @@ public boolean isMFC() { CyclicCheckResult checkCyclic; try { checkCyclic = this.vLog.checkCyclic("MFC"); - } catch (NotStartedException e) { + } catch (final NotStartedException e) { throw new RuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.CYCLIC); @@ -623,7 +647,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { CyclicCheckResult checkCyclic; try { checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (NotStartedException e) { + } catch (final NotStartedException e) { throw new RuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index b26d50f03..43d54c2dc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -27,6 +27,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -56,10 +57,11 @@ public class VLogReasonerCombinedInputs { final Fact factPd = Expressions.makeFact("p", Arrays.asList(Expressions.makeConstant("d"))); final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q, Arrays.asList(Expressions.makeVariable("x"))); - final Set> resultsCC1C2D = Set.of(Collections.singletonList(Expressions.makeConstant("c")), - Collections.singletonList(Expressions.makeConstant("c1")), - Collections.singletonList(Expressions.makeConstant("c2")), - 
Collections.singletonList(Expressions.makeConstant("d"))); + final Set> resultsCC1C2D = new HashSet<>( + Arrays.asList(Collections.singletonList(Expressions.makeConstant("c")), + Collections.singletonList(Expressions.makeConstant("c1")), + Collections.singletonList(Expressions.makeConstant("c2")), + Collections.singletonList(Expressions.makeConstant("d")))); final DataSourceDeclaration qFromCsv; final DataSourceDeclaration qCDFromCsv; From f3d9cedfec158f14bf35856edc84c662fa432d8b Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 16:19:14 +0200 Subject: [PATCH 0411/1255] make sure default logging level is set --- .../vlog4j/core/reasoner/LoggingTest.java | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 8ea46a921..9134e43dc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -162,6 +162,35 @@ public void testLogLevelDebug() throws IOException { } + @Test + public void testLogLevelDefault() throws IOException { + final String defaultLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; + assertFalse(new File(defaultLogFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(defaultLogFilePath); + + reasoner.load(); + reasoner.reason(); + reasoner.close(); + } + final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); + + final String warningLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; + assertFalse(new File(warningLogFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(warningLogFilePath); + reasoner.setLogLevel(LogLevel.WARNING); + 
reasoner.load(); + reasoner.reason(); + reasoner.close(); + } + final int countLinesReasonLogLevelWarning = readFile(warningLogFilePath); + + assertTrue(countLinesReasonLogLevelDefault == countLinesReasonLogLevelWarning); + } + private int readFile(final String logFilePath) throws IOException, FileNotFoundException { int countLines = 0; assertTrue(new File(logFilePath).exists()); From 063b46573cabcfd7f705619c47b268afdfcab645 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 22 Aug 2019 18:11:45 +0200 Subject: [PATCH 0412/1255] small unit test materialisation state --- .../model/implementation/Expressions.java | 27 +++++++++++ .../core/reasoner/MaterialisationState.java | 18 +++++-- .../reasoner/implementation/VLogReasoner.java | 26 ++++++---- .../implementation/ReasonerStateTest.java | 48 +++++++++++++++++-- 4 files changed, 104 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index 7a89a56a6..54a87edf9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -113,6 +113,21 @@ public static Fact makeFact(final String predicateName, final List terms) return new FactImpl(predicate, terms); } + + /** + * Creates a {@code Fact}. + * + * @param predicateName on-blank {@link Predicate} name + * @param terms non-empty, non-null array of non-null terms + * @return a {@link Fact} with given {@code terms} and {@link Predicate} + * constructed from name given {@code predicateName} and {@code arity} + * given {@code terms} size. + */ + public static Fact makeFact(final String predicateName, Term... 
terms) { + final Predicate predicate = makePredicate(predicateName, terms.length); + + return new FactImpl(predicate, Arrays.asList(terms)); + } /** * Creates a {@code Fact}. @@ -125,6 +140,18 @@ public static Fact makeFact(final String predicateName, final List terms) public static Fact makeFact(final Predicate predicate, final List terms) { return new FactImpl(predicate, terms); } + + /** + * Creates a {@code Fact}. + * + * @param predicate a non-null {@link Predicate} + * @param terms non-empty, non-null array of non-null terms. Array size must + * be the same as the given {@code predicate} arity. + * @return a {@link Fact} corresponding to the input. + */ + public static Fact makeFact(final Predicate predicate, final Term... terms) { + return new FactImpl(predicate, Arrays.asList(terms)); + } /** * Creates a {@code PositiveLiteral}. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java index fed24a7b9..3e405eae8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java @@ -29,23 +29,33 @@ */ public enum MaterialisationState { - //TODO should we have different states for incomplete due to halting, vs incomplete due to adding facts for non-negated rules? /** * Reasoning has not completed. Query answering yields sound, but possibly * incomplete answers. */ - INCOMPLETE, + INCOMPLETE("incomplete"), /** * Query answering may give incorrect answers. Re-materialisation * ({@link Reasoner#reason()}) is required, in order to obtain correct results. */ - WRONG, + WRONG("wrong"), /** * Reasoning over current knowledge base is complete, and query answering yields * sound and complete results. 
*/ - COMPLETE + COMPLETE("complete"); + + private final String name; + + private MaterialisationState(String name) { + this.name = name; + } + + @Override + public String toString() { + return name; + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index f402df282..7a7b2011c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -260,7 +260,7 @@ public VLogReasoner(KnowledgeBase knowledgeBase) { super(); this.knowledgeBase = knowledgeBase; this.knowledgeBase.addListener(this); - + setLogLevel(this.internalLogLevel); } @@ -310,7 +310,7 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { @Override public void load() throws IOException { validateNotClosed(); - + switch (this.reasonerState) { case KB_NOT_LOADED: loadKnowledgeBase(); @@ -352,8 +352,8 @@ void loadKnowledgeBase() throws IOException { loadRules(); this.reasonerState = ReasonerState.KB_LOADED; - - //TODO: if there are no rules, then materialisation state is complete + + // TODO: if there are no rules, then materialisation state is complete this.materialisationState = MaterialisationState.INCOMPLETE; } @@ -435,7 +435,8 @@ void loadFacts() { } try { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(directEdbFacts.get(predicate)); + final String[][] vLogPredicateTuples = ModelToVLogConverter + .toVLogFactTuples(directEdbFacts.get(predicate)); this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { @@ -468,7 +469,7 @@ void loadRules() { @Override public boolean reason() throws IOException { 
validateNotClosed(); - + switch (this.reasonerState) { case KB_NOT_LOADED: load(); @@ -488,7 +489,7 @@ public boolean reason() throws IOException { default: break; } - + return this.reasoningCompleted; } @@ -513,7 +514,7 @@ private void runChase() { "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); } - + this.materialisationState = this.reasoningCompleted ? MaterialisationState.COMPLETE : MaterialisationState.INCOMPLETE; } @@ -539,6 +540,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } + logWarningOnMaterialisationState(); return new QueryResultIterator(stringQueryResultIterator, this.materialisationState); } @@ -563,9 +565,17 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, throw new IllegalArgumentException(MessageFormat.format( "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } + + logWarningOnMaterialisationState(); return this.materialisationState; } + private void logWarningOnMaterialisationState() { + if (this.materialisationState != MaterialisationState.COMPLETE) { + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.materialisationState); + } + } + @Override public void resetReasoner() { validateNotClosed(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 208fcd4c0..4db277b56 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -28,6 +28,7 @@ import 
java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -46,6 +47,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -55,14 +57,14 @@ public class ReasonerStateTest { private static final Predicate q = Expressions.makePredicate("q", 1); private static final Variable x = Expressions.makeVariable("x"); private static final Constant c = Expressions.makeConstant("c"); - // private static final Constant d = Expressions.makeConstant("d"); + private static final Constant d = Expressions.makeConstant("d"); private static final PositiveLiteral exampleQueryAtom = Expressions.makePositiveLiteral("q", x); private static final PositiveLiteral ruleHeadQx = Expressions.makePositiveLiteral(q, x); private static final PositiveLiteral ruleBodyPx = Expressions.makePositiveLiteral(p, x); private static final Rule ruleQxPx = Expressions.makeRule(ruleHeadQx, ruleBodyPx); - private static final Fact factPc = Expressions.makeFact(p, Arrays.asList(c)); - // private static final Atom factPd = Expressions.makeAtom(q, d); + private static final Fact factPc = Expressions.makeFact(p, c); + private static final Fact factPd = Expressions.makeFact(p, d); @Test(expected = NullPointerException.class) public void testSetAlgorithm() { @@ -78,6 +80,46 @@ public void testSetReasoningTimeout() { } } + @Test + public void testAddFactsAndQuery() throws IOException { + try (final Reasoner reasoner = Reasoner.getInstance();) { + reasoner.getKnowledgeBase().addStatement(factPc); + reasoner.load(); + + final PositiveLiteral query = Expressions.makePositiveLiteral(p, x); 
+ final Set> expectedAnswersC = new HashSet<>( + Arrays.asList(Collections.singletonList(c))); + + try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); + + assertEquals(expectedAnswersC, queryAnswersC); + } + + + reasoner.getKnowledgeBase().addStatement(factPd); + + try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); + } + + reasoner.load(); + + + try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + + final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); + + final Set> expectedAnswersCD = new HashSet<>( + Arrays.asList(Collections.singletonList(c), Collections.singletonList(d))); + assertEquals(expectedAnswersCD, queryAnswersD); + } + } + } + // FIXME update test @Ignore @Test(expected = ReasonerStateException.class) From 9449d5b14541acbbfb27664210d7927053e781e8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 22 Aug 2019 22:28:32 +0200 Subject: [PATCH 0413/1255] Log main reasoning steps; fewer println's in examples --- .../reasoner/implementation/VLogReasoner.java | 18 ++++++++++++++---- .../vlog4j/examples/CountingTriangles.java | 7 +------ .../vlog4j/examples/DoidExample.java | 7 +------ .../examples/SimpleReasoningExample.java | 6 ------ .../SkolemVsRestrictedChaseTermination.java | 8 +++----- 5 files changed, 19 insertions(+), 27 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 7a7b2011c..b97f48dd5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -328,6 +328,7 @@ public void load() throws IOException { } void loadKnowledgeBase() throws IOException { + LOGGER.info("Started loading knowledge base ..."); final LoadKbVisitor visitor = new LoadKbVisitor(); visitor.clearIndexes(); for (final Statement statement : knowledgeBase) { @@ -355,6 +356,8 @@ void loadKnowledgeBase() throws IOException { // TODO: if there are no rules, then materialisation state is complete this.materialisationState = MaterialisationState.INCOMPLETE; + + LOGGER.info("Finished loading knowledge base."); } String getDataSourceConfigurationString() { @@ -494,6 +497,7 @@ public boolean reason() throws IOException { } private void runChase() { + LOGGER.info("Started materialisation of inferences ..."); this.reasonerState = ReasonerState.MATERIALISED; final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; @@ -504,7 +508,6 @@ private void runChase() { } else { this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); } - } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { @@ -515,8 +518,13 @@ private void runChase() { e); } - this.materialisationState = this.reasoningCompleted ? 
MaterialisationState.COMPLETE - : MaterialisationState.INCOMPLETE; + if (this.reasoningCompleted) { + this.materialisationState = MaterialisationState.COMPLETE; + LOGGER.info("Completed materialisation of inferences."); + } else { + this.materialisationState = MaterialisationState.INCOMPLETE; + LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); + } } @Override @@ -572,7 +580,8 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, private void logWarningOnMaterialisationState() { if (this.materialisationState != MaterialisationState.COMPLETE) { - LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.materialisationState); + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", + this.materialisationState); } } @@ -589,6 +598,7 @@ public void close() { this.reasonerState = ReasonerState.CLOSED; this.knowledgeBase.deleteListener(this); this.vLog.stop(); + LOGGER.info("Reasoner closed."); } @Override diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index d72cc644a..3a4fc8cc5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -58,13 +58,10 @@ public static void main(final String[] args) throws IOException { try (VLogReasoner reasoner = new VLogReasoner(kb)) { /* Initialise reasoner and compute inferences */ - System.out.print("Initialising rules and data sources ... "); reasoner.load(); - System.out.println("completed."); - System.out.print("Reasoning (including SPARQL query answering) ... 
"); + System.out.println("Note: Materialisation includes SPARQL query answering."); reasoner.reason(); - System.out.println("completed."); /* Execute queries */ try { @@ -84,8 +81,6 @@ public static void main(final String[] args) throws IOException { } catch (final ParsingException e) { System.out.println("Failed to parse query: " + e.getMessage()); } - - System.out.println("Done."); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 281f48a32..9a686b44c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -66,13 +66,10 @@ public static void main(final String[] args) throws IOException { reasoner.setLogLevel(LogLevel.DEBUG); /* Initialise reasoner and compute inferences */ - System.out.print("Initialising rules and data sources ... "); reasoner.load(); - System.out.println("completed."); - System.out.print("Reasoning (including SPARQL query answering) ... 
"); + System.out.println("Note: Materialisation includes SPARQL query answering."); reasoner.reason(); - System.out.println("completed."); /* Execute some queries */ final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); @@ -87,8 +84,6 @@ public static void main(final String[] args) throws IOException { System.out.println("Failed to parse query: " + e.getMessage()); } } - - System.out.println("\nDone."); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index fea0dbe82..81a3362f9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -77,20 +77,14 @@ public static void main(final String[] args) throws IOException { } try (final Reasoner reasoner = new VLogReasoner(kb)) { - System.out.print("Loading knowledge base ... "); reasoner.load(); - System.out.println("done."); - System.out.print("Computing all inferences ... 
"); reasoner.reason(); - System.out.println("done.\n"); /* Execute some queries */ ExamplesUtils.printOutQueryAnswers("address(?Org, ?Street, ?ZIP, ?City)", reasoner); ExamplesUtils.printOutQueryAnswers("locatedIn(?place, europe)", reasoner); ExamplesUtils.printOutQueryAnswers("inEuropeOutsideGermany(?Org)", reasoner); - - System.out.println("Done."); } } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 256d211b5..d147a6a19 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -82,9 +82,8 @@ public static void main(final String[] args) throws IOException, ParsingExceptio */ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); reasoner.setReasoningTimeout(1); - System.out.print("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ... "); + System.out.println("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ..."); final boolean skolemChaseFinished = reasoner.reason(); - System.out.println("done."); /* Verify that the Skolem Chase did not terminate before timeout. */ System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? 
" + skolemChaseFinished); @@ -101,7 +100,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * We reset the reasoner and apply the Restricted Chase on the same set of rules * and facts */ - System.out.println("\nReseting reasoner; discarding facts generated during reasoning."); + System.out.println(); reasoner.resetReasoner(); reasoner.load(); @@ -119,9 +118,8 @@ public static void main(final String[] args) throws IOException, ParsingExceptio reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setReasoningTimeout(null); final long restrictedChaseStartTime = System.currentTimeMillis(); - System.out.print("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); + System.out.println("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); reasoner.reason(); - System.out.println("done."); /* The Restricted Chase terminates: */ final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; From 45115be91a2dfb9c5792a10cc4a7fe9f9a6fb430 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 11:50:02 +0200 Subject: [PATCH 0414/1255] More efficient conversion of Facts to VLog --- .../implementation/ModelToVLogConverter.java | 11 +++++------ .../implementation/TermToVLogConverter.java | 15 ++++++++++----- .../reasoner/implementation/VLogReasoner.java | 3 +-- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java index 7ca9246ae..a83e6132c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java @@ -26,7 +26,6 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; 
import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; @@ -66,21 +65,21 @@ static karmaresearch.vlog.Term[] toVLogTermArray(final List terms) { static String[][] toVLogFactTuples(final Collection facts) { final String[][] tuples = new String[facts.size()][]; int i = 0; - for (final PositiveLiteral atom : facts) { - final String[] vLogFactTuple = ModelToVLogConverter.toVLogFactTuple(atom); + for (final Fact fact : facts) { + final String[] vLogFactTuple = ModelToVLogConverter.toVLogFactTuple(fact); tuples[i] = vLogFactTuple; i++; } return tuples; } - static String[] toVLogFactTuple(final PositiveLiteral fact) { + static String[] toVLogFactTuple(final Fact fact) { final List terms = fact.getTerms(); final String[] vLogFactTuple = new String[terms.size()]; int i = 0; for (final Term term : terms) { - final karmaresearch.vlog.Term vLogTupleTerm = toVLogTerm(term); - vLogFactTuple[i] = vLogTupleTerm.getName(); + // No checks for type of term -- only constants allowed in facts! 
+ vLogFactTuple[i] = TermToVLogConverter.getVLogNameForConstant(term.getName()); i++; } return vLogFactTuple; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java index 509b5978d..cfa887714 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java @@ -40,12 +40,17 @@ class TermToVLogConverter implements TermVisitor { */ @Override public karmaresearch.vlog.Term visit(Constant term) { - if (term.getName().startsWith("\"")) { // keep datatype literal strings unchanged - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); - } else if (term.getName().contains(":")) { // enclose IRIs with < > - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "<" + term.getName() + ">"); + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + getVLogNameForConstant(term.getName())); + } + + public static String getVLogNameForConstant(String vLog4jConstantName) { + if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged + return vLog4jConstantName; + } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > + return "<" + vLog4jConstantName + ">"; } else { // keep relative IRIs unchanged - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + return vLog4jConstantName; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b97f48dd5..cb1684627 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -443,8 +443,7 @@ void loadFacts() { this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug( - "Loaded direct fact " + vLogPredicateName + "(" + Arrays.deepToString(tuple) + ")"); + LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); } } } catch (final EDBConfigurationException e) { From 2bbac7b8a929db97e8e38d6185de8f79a5f2831f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 14:46:12 +0200 Subject: [PATCH 0415/1255] Efficient in-memory data source --- .../implementation/InMemoryDataSource.java | 84 +++++++++++++++++++ .../reasoner/implementation/VLogReasoner.java | 63 +++++++++++--- 2 files changed, 135 insertions(+), 12 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java new file mode 100644 index 000000000..ac9a76569 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -0,0 +1,84 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.util.Arrays; + +import org.semanticweb.vlog4j.core.model.api.DataSource; + +/** + * A {@link DataSource} for representing a large number of facts that were + * generated in Java. 
Rather than making {@link Fact} objects for all of them, + * the object will directly accept tuples of constant names that are internally + * stored in a form that can be passed to the reasoner directly, thereby saving + * memory and loading time. + * + * @author Markus Kroetzsch + * + */ +public class InMemoryDataSource implements DataSource { + + String[][] data; + int nextEmptyTuple = 0; + int capacity; + final int arity; + + /** + * Create a new in-memory data source for facts of the specified arity. The + * given capacity is the initial size of the space allocated. For best + * efficiency, the actual number of facts should exactly correspond to this + * capacity. + * + * @param arity the number of parameters in a fact from this source + * @param initialCapacity the planned number of facts + */ + public InMemoryDataSource(int arity, int initialCapacity) { + this.capacity = initialCapacity; + this.arity = arity; + data = new String[initialCapacity][arity]; + } + + /** + * Adds a fact to this data source. The number of constant names must agree with + * the arity of this data source. + * + * @param constantNames the string names of the constants in this fact + */ + public void addTuple(String... constantNames) { + if (constantNames.length != arity) { + throw new IllegalArgumentException("This data source holds tuples of arity " + arity + + ". Adding a tuple of size " + constantNames.length + " is not possible."); + } + if (nextEmptyTuple == capacity) { + capacity = capacity * 2; + this.data = Arrays.copyOf(data, capacity); + } + data[nextEmptyTuple] = new String[arity]; + for (int i = 0; i < arity; i++) { + data[nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstant(constantNames[i]); + } + nextEmptyTuple++; + } + + /** + * Returns the data stored in this data source, in the format expected by the + * VLog reasoner backend. 
+ * + * @return the data + */ + public String[][] getData() { + if (nextEmptyTuple == capacity) { + return this.data; + } else { + return Arrays.copyOf(this.data, this.nextEmptyTuple); + } + } + + /** + * Returns null to indicate that this {@link DataSource} cannot be passed to + * VLog in a configuration string. + */ + @Override + public String toConfigString() { + return null; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index cb1684627..471f83d32 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -340,12 +340,13 @@ void loadKnowledgeBase() throws IOException { } try { - this.vLog.start(getDataSourceConfigurationString(), false); + this.vLog.start(getDataSourcesConfigurationString(), false); } catch (final AlreadyStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration.", e); } + loadInMemoryDataSources(); validateDataSourcePredicateArities(); @@ -360,30 +361,37 @@ void loadKnowledgeBase() throws IOException { LOGGER.info("Finished loading knowledge base."); } - String getDataSourceConfigurationString() { + String getDataSourcesConfigurationString() { final StringBuilder configStringBuilder = new StringBuilder(); final Formatter formatter = new Formatter(configStringBuilder); int dataSourceIndex = 0; for (final Predicate predicate : this.edbPredicates.keySet()) { final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - if (dataSourceDeclaration.getDataSource() != null) { - formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, - 
ModelToVLogConverter.toVLogPredicate(predicate)); - dataSourceIndex++; - } + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, + dataSourceIndex, formatter); } for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - if (dataSourceDeclaration.getDataSource() != null) { - formatter.format(dataSourceDeclaration.getDataSource().toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(aliasPredicate)); - dataSourceIndex++; - } + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, + dataSourceIndex, formatter); } formatter.close(); return configStringBuilder.toString(); } + int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, + Formatter formatter) { + if (dataSource != null) { + String configString = dataSource.toConfigString(); + if (configString != null) { + formatter.format(dataSource.toConfigString(), dataSourceIndex, + ModelToVLogConverter.toVLogPredicate(predicate)); + return dataSourceIndex + 1; + } + } + return dataSourceIndex; + } + /** * Checks if the loaded external data sources do in fact contain data of the * correct arity. 
@@ -401,6 +409,37 @@ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityExcep } } + void loadInMemoryDataSources() { + for (final Predicate predicate : this.edbPredicates.keySet()) { + final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); + } + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); + } + } + + void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { + final InMemoryDataSource inMemoryDataSource; + if (dataSource instanceof InMemoryDataSource) { + inMemoryDataSource = (InMemoryDataSource) dataSource; + } else { + return; + } + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + } + /** * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predidate. 
From 95d7216c704620c94d14446700d1dc742e55fcdb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 14:46:32 +0200 Subject: [PATCH 0416/1255] Example for in-memory data source --- .../vlog4j/examples/ExamplesUtils.java | 27 +++++-- .../InMemoryGraphAnalysisExample.java | 78 +++++++++++++++++++ 2 files changed, 99 insertions(+), 6 deletions(-) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index b6a456484..7ea5be93a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -84,10 +84,8 @@ public static void configureLogging() { * * @param queryAtom query to be answered * @param reasoner reasoner to query on - * @throws ReasonerStateException in case the reasoner has not yet been loaded. */ - public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) - throws ReasonerStateException { + public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { System.out.println("Answers to query " + queryAtom + " :"); try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { answers.forEachRemaining(answer -> System.out.println(" - " + answer)); @@ -101,10 +99,8 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * * @param queryAtom query to be answered * @param reasoner reasoner to query on - * @throws ReasonerStateException in case the reasoner has not yet been loaded. 
*/ - public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) - throws ReasonerStateException { + public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) { try { PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); printOutQueryAnswers(query, reasoner); @@ -113,6 +109,25 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner } } + /** + * Returns the number of answers returned by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + * @throws ReasonerStateException in case the reasoner has not yet been loaded. + */ + public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { + try { + PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) { + return iteratorSize(answers); + } + } catch (ParsingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + /** * Returns the size of an iterator. 
* diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java new file mode 100644 index 000000000..cb8b88f6f --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -0,0 +1,78 @@ +package org.semanticweb.vlog4j.examples; + +import java.io.IOException; + +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +/** + * This example shows how to reason efficiently with data sets generated in + * Java. We generate a random graph with several million edges, check + * connectivity, and count triangles. + * + * Parameters can be modified to obtain graphs of different sizes and density. + * It should be noted, however, that the number of triangles in reasonably dense + * graphs tends to be huge, and it is easy to exhaust memory in this way. + * + * @author Markus Kroetzsch + * + */ +public class InMemoryGraphAnalysisExample { + + public static void main(String[] args) throws ParsingException, IOException { + ExamplesUtils.configureLogging(); + + /* 1. 
Create a simple random graph */ + System.out.println("Generating random graph ..."); + int vertexCount = 10000; + double density = 0.03; + // initialise data source for storing edges (estimate how many we'll need) + InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); + int edgeCount = 0; + for (int i = 1; i <= vertexCount; i++) { + for (int j = 1; j <= vertexCount; j++) { + if (Math.random() < density) { + edges.addTuple("v" + i, "v" + j); + edgeCount++; + } + } + } + // also make a unary data source to mark vertices: + InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); + for (int i = 1; i <= vertexCount; i++) { + vertices.addTuple("v" + i); + } + System.out.println("Generated " + edgeCount + " edges in random graph of " + vertexCount + " vertices."); + + /* 2. Initialise database with random data and some rules */ + + final String rules = "" // + + "biedge(?X,?Y) :- edge(?X,?Y), edge(?Y,?X) ." // + + "connected(v1) ." // + + "connected(?X) :- connected(?Y), biedge(?Y,?X) ." // + + "unreachable(?X) :- vertex(?X), ~connected(?X) . " // + + "triangle(?X, ?Y, ?Z) :- biedge(?X,?Y), biedge(?Y, ?Z), biedge(?Z,?X) ."; + + final KnowledgeBase kb = RuleParser.parse(rules); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("vertex", 1), vertices)); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("edge", 2), edges)); + + /* 3. 
Use reasoner to compute some query results */ + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + + System.out.println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + + ExamplesUtils.getQueryAnswerCount("unreachable(?X)", reasoner)); + System.out.println("Number of bi-directional triangles: " + + ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6); + } + } + +} From 673139e86970856df0e0ad19f27b0d9b948c4bc4 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 23:11:22 +0200 Subject: [PATCH 0417/1255] new example to compare DBpedia with Wikidata --- .../examples/CompareWikidataDBpedia.java | 84 +++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java new file mode 100644 index 000000000..4eaa42e3c --- /dev/null +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -0,0 +1,84 @@ +package org.semanticweb.vlog4j.examples; + +import java.io.IOException; + +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +/** + * This example shows how to integrate and compare the contents of two SPARQL + * endpoints, in this case for Wikidata and DBpedia. We are asking both sources + * for the same information (each using their terms to express it), and query + * for related English Wikipedia article URLs as a key to integrate the data + * over. 
For a fair comparison, we restrict to Wikidata entities that have a + * related English Wikipedia page (others cannot be in English DBpedia in the + * first place). + * + * The example query used asks for alumni of the University of Leipzig (one of + * the oldest European universities). + * + * @author Markus Kroetzsch + * + */ +public class CompareWikidataDBpedia { + + /** + * SPARQL pattern snippet to find an English Wikipedia page URL from a Wikidata + * entity ?result. + */ + static String sparqlGetWikiIriWikidata = "?enwikipage schema:about ?result ; " + + "schema:isPartOf . "; + /** + * SPARQL pattern snippet to find an English Wikipedia page URL from a DBpedia + * entity ?result. Some string magic is needed to replace the outdated http + * protocol used in DBpedia's Wikidata page names by the current https. + */ + static String sparqlGetWikiIriDBpedia = "?result ?enwikipageHttp . " + + "BIND( IRI(CONCAT(\"https\",SUBSTR(str(?enwikipageHttp), 5))) AS ?enwikipage)"; + + public static void main(String[] args) throws ParsingException, IOException { + ExamplesUtils.configureLogging(); + + // Wikidata pattern: P69 is "educated at"; Q154804 is "University of Leipzig" + String wikidataSparql = "?result wdt:P69 wd:Q154804 . " + sparqlGetWikiIriWikidata; + // DBpedia pattern: + String dbpediaSparql = "?result . " + + sparqlGetWikiIriDBpedia; + + // Configure the SPARQL data sources and some rules to analyse results: + String rules = "" // + + "@prefix wdqs: ." // + + "@prefix dbp: ." // + + "@source dbpResult(2) : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // + + "@source wdResult(2) : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "% Rules:\n" // + + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // + + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)." // + + "result(?Wikipage) :- inWd(?Wikipage)." // + + "result(?Wikipage) :- inDbp(?Wikipage)." 
// + + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage)." + + "dbpOnly(?DbpId,?Wikipage) :- dbpResult(?DbpId,?Wikipage), ~inWd(?Wikipage)." + + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage)." + ""; // + + final KnowledgeBase kb = RuleParser.parse(rules); + + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + + int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); + int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); + int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); + + System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + + " were in Wikidata and " + dbpCount + " were in DBPedia"); + + System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); + ExamplesUtils.printOutQueryAnswers("dbpOnly(?X,?Y)", reasoner); + } + } + +} From 56a5ad06086066823dabca21530c32cd6b94f5a5 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 23 Aug 2019 23:15:28 +0200 Subject: [PATCH 0418/1255] nicer output --- .../semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 4eaa42e3c..d716b76bf 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -60,7 +60,7 @@ public static void main(String[] args) throws ParsingException, IOException { + "result(?Wikipage) :- inWd(?Wikipage)." // + "result(?Wikipage) :- inDbp(?Wikipage)." // + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage)." 
- + "dbpOnly(?DbpId,?Wikipage) :- dbpResult(?DbpId,?Wikipage), ~inWd(?Wikipage)." + + "dbpOnly(?Wikipage) :- inDbp(?Wikipage), ~inWd(?Wikipage)." + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage)." + ""; // final KnowledgeBase kb = RuleParser.parse(rules); @@ -77,7 +77,7 @@ public static void main(String[] args) throws ParsingException, IOException { + " were in Wikidata and " + dbpCount + " were in DBPedia"); System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); - ExamplesUtils.printOutQueryAnswers("dbpOnly(?X,?Y)", reasoner); + ExamplesUtils.printOutQueryAnswers("dbpOnly(?X)", reasoner); } } From 5d2ebd6ce4aa7b59793d1443a57e4f92e97ed9dd Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 25 Aug 2019 14:26:56 +0200 Subject: [PATCH 0419/1255] Add note about imperfection of mapping --- .../semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index d716b76bf..460888c59 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -78,6 +78,10 @@ public static void main(String[] args) throws ParsingException, IOException { System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); ExamplesUtils.printOutQueryAnswers("dbpOnly(?X)", reasoner); + + System.out.println("Note: some of these results might still be in Wikidata, due to:\n" + + "* recent Wikipedia article renamings that are not updated in DBpedia\n" + + "* failure to match Wikipedia URLs due to small differences in character encoding\n"); } } From 61c66080ee10793962951961b4b6354e7103b42d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 27 
Aug 2019 10:45:39 +0200 Subject: [PATCH 0420/1255] Warn & return empty result (instead of exception) --- .../core/reasoner/QueryResultIterator.java | 32 ++++++++++ .../vlog4j/core/reasoner/Reasoner.java | 1 - .../EmptyQueryResultIterator.java | 60 +++++++++++++++++++ .../implementation/InMemoryDataSource.java | 20 +++++++ ...ator.java => VLogQueryResultIterator.java} | 7 +-- .../reasoner/implementation/VLogReasoner.java | 8 ++- .../vlog4j/core/reasoner/LoggingTest.java | 5 +- .../implementation/AddDataSourceTest.java | 1 + .../implementation/AnswerQueryTest.java | 16 +++-- .../FileDataSourceTestUtils.java | 1 + .../implementation/QueryResultsUtils.java | 1 + .../implementation/ReasonerStateTest.java | 1 + .../implementation/VLogReasonerBasics.java | 1 + .../VLogReasonerCombinedInputs.java | 1 + .../implementation/VLogReasonerCsvInput.java | 1 + .../implementation/VLogReasonerNegation.java | 1 + .../implementation/VLogReasonerRdfInput.java | 1 + .../VLogReasonerSparqlInput.java | 1 + .../examples/CompareWikidataDBpedia.java | 20 +++++++ .../vlog4j/examples/DoidExample.java | 2 +- .../vlog4j/examples/ExamplesUtils.java | 4 +- .../InMemoryGraphAnalysisExample.java | 20 +++++++ .../core/AddDataFromSparqlQueryResults.java | 2 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/DoidExampleGraal.java | 2 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 2 +- .../examples/rdf/AddDataFromRdfModel.java | 2 +- .../vlog4j/rdf/TestReasonOverRdfFacts.java | 2 +- 28 files changed, 194 insertions(+), 23 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QueryResultIterator.java => VLogQueryResultIterator.java} (89%) diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java new file mode 100644 index 000000000..c2a7ee746 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java @@ -0,0 +1,32 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.Iterator; + +import org.semanticweb.vlog4j.core.model.api.QueryResult; + +public interface QueryResultIterator extends Iterator, AutoCloseable { + + public MaterialisationState getMaterialisationState(); + + public void close(); +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index ff4654864..d8872e987 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -8,7 +8,6 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import karmaresearch.vlog.Atom; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java new file mode 100644 index 000000000..268c27371 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -0,0 +1,60 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.QueryResult; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; + +/** + * Iterator that represents an empty query result. + * + * @author Markus Kroetzsch + * + */ +public class EmptyQueryResultIterator implements QueryResultIterator { + + final MaterialisationState materialisationState; + + public EmptyQueryResultIterator(MaterialisationState materialisationState) { + this.materialisationState = materialisationState; + } + + @Override + public void close() { + // nothing to do + } + + @Override + public boolean hasNext() { + return false; + } + + @Override + public QueryResult next() { + return null; + } + + public MaterialisationState getMaterialisationState() { + return this.materialisationState; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index ac9a76569..0071c210c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.util.Arrays; import org.semanticweb.vlog4j.core.model.api.DataSource; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java index 3e0933c75..fe91caae8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java @@ -20,10 +20,9 @@ * #L% */ -import java.util.Iterator; - import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; @@ -35,14 +34,14 @@ * @author Irina Dragoste * */ -public class QueryResultIterator implements Iterator, AutoCloseable { +public class VLogQueryResultIterator implements QueryResultIterator { private final TermQueryResultIterator vLogTermQueryResultIterator; private final MaterialisationState materialisationState; // TODO add 
reasoningState to constructor - public QueryResultIterator(final TermQueryResultIterator termQueryResultIterator, + public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, final MaterialisationState materialisationState) { this.vLogTermQueryResultIterator = termQueryResultIterator; this.materialisationState = materialisationState; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 471f83d32..1eaf10614 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -582,12 +583,13 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { - throw new IllegalArgumentException(MessageFormat.format( - "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. 
Answer must be empty."); + return new EmptyQueryResultIterator(MaterialisationState.COMPLETE); } logWarningOnMaterialisationState(); - return new QueryResultIterator(stringQueryResultIterator, this.materialisationState); + return new VLogQueryResultIterator(stringQueryResultIterator, this.materialisationState); } @Override diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 9134e43dc..dbc3c7bc7 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.reasoner; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -177,7 +178,7 @@ public void testLogLevelDefault() throws IOException { final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); final String warningLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; - assertFalse(new File(warningLogFilePath).exists()); + //assertFalse(new File(warningLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(warningLogFilePath); @@ -188,7 +189,7 @@ public void testLogLevelDefault() throws IOException { } final int countLinesReasonLogLevelWarning = readFile(warningLogFilePath); - assertTrue(countLinesReasonLogLevelDefault == countLinesReasonLogLevelWarning); + assertEquals(countLinesReasonLogLevelDefault, countLinesReasonLogLevelWarning); } private int readFile(final String logFilePath) throws IOException, FileNotFoundException { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 
a53a28fa2..c3d1ec8b8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -39,6 +39,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class AddDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index 90b6fa877..fa8c39a65 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Set; @@ -45,6 +46,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -241,7 +243,7 @@ public void queryResultWithBlanks() throws IOException { } } - @Test(expected = IllegalArgumentException.class) + @Test public void queryEmptyKnowledgeBaseBeforeReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -249,11 +251,14 @@ public void queryEmptyKnowledgeBaseBeforeReasoning() throws IOException { reasoner.load(); final PositiveLiteral queryAtom = 
Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); - reasoner.answerQuery(queryAtom, true); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true) ) { + final Set> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(Collections.EMPTY_SET, queryResults); + } } } - @Test(expected = IllegalArgumentException.class) + @Test public void queryEmptyKnowledgeBaseAfterReasoning() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -263,7 +268,10 @@ public void queryEmptyKnowledgeBaseAfterReasoning() throws IOException { reasoner.reason(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", Expressions.makeVariable("?x")); - reasoner.answerQuery(queryAtom, true); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true) ) { + final Set> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator); + assertEquals(Collections.EMPTY_SET, queryResults); + } } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java index 7898e0e4e..a2b1b8036 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -38,6 +38,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; /** diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java index dae331856..1ec594328 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; /** * Utility class with static methods for collecting the results of a query for diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index 4db277b56..e03a0d4aa 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -48,6 +48,7 @@ import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java index 7be7634e2..e3d87cef4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java @@ -38,6 +38,7 @@ import 
org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerBasics { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 43d54c2dc..7b11e32b1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -42,6 +42,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerCombinedInputs { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java index b9e5580e6..2bcbdfaa8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerCsvInput { diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java index 801556108..1e9d3a113 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java @@ -42,6 +42,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerNegation { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java index 3b72ab8cd..bf7d6aca2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerRdfInput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java index 2922c48e4..4e31e2b8a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -37,6 +37,7 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class VLogReasonerSparqlInput { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 460888c59..93ce773fe 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 9a686b44c..bfbd3a3d7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -28,8 +28,8 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 7ea5be93a..929bb1832 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -33,8 +33,8 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -72,7 +72,7 @@ public static void configureLogging() { 
String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: - consoleAppender.setThreshold(Level.INFO); + consoleAppender.setThreshold(Level.DEBUG); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index cb8b88f6f..05d0b65ef 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.examples; +/*- + * #%L + * VLog4j Examples + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 21b403dcb..40084ccdb 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -36,8 +36,8 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.examples.ExamplesUtils; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index d147a6a19..aef1c2df4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -25,7 +25,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; 
import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index f203d401f..5d27f95e1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -35,8 +35,8 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 3c3834942..3f14f1556 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -37,7 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import 
org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index d5a81a085..b3839f86a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -44,7 +44,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index ea3dd7a46..e1c32fc63 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -46,8 +46,8 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import 
org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; public class TestReasonOverRdfFacts { From feac6b7717f0501a06d33b26cdf7f1165fd76b6a Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 11:29:34 +0200 Subject: [PATCH 0421/1255] modified unit test to remove logs from dir before --- vlog4j-core/src/test/data/logs/.keep | 0 .../vlog4j/core/reasoner/LoggingTest.java | 32 ++++++++++++++----- 2 files changed, 24 insertions(+), 8 deletions(-) delete mode 100644 vlog4j-core/src/test/data/logs/.keep diff --git a/vlog4j-core/src/test/data/logs/.keep b/vlog4j-core/src/test/data/logs/.keep deleted file mode 100644 index e69de29bb..000000000 diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index dbc3c7bc7..5e7806fed 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -32,6 +32,7 @@ import java.io.IOException; import java.util.Arrays; +import org.junit.BeforeClass; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -43,7 +44,7 @@ public class LoggingTest { - public static final String LOGS_FOLDER = "src/test/data/logs/"; + public static final String LOGS_DIRECTORY = "src/test/data/logs/"; private static final Variable vx = Expressions.makeVariable("x"); // p(?x) -> q(?x) @@ -60,6 +61,21 @@ public class LoggingTest { kb.addStatements(rule, factPc); } + @BeforeClass + public static void emptyLogDirectory() { + + final File logsDir = new File(LOGS_DIRECTORY); + + if (!logsDir.exists()) { + logsDir.mkdir(); + } + + final File[] listFiles = logsDir.listFiles(); + for (final File file : listFiles) { + file.delete(); + } + } + // TODO 
remaining tests: change log file // TODO remaining tests: test that the log level and the log files can be set // any time @@ -78,7 +94,7 @@ public void testSetLogFileNull() throws IOException { @Test public void testSetLogFileInexistent() throws IOException { - final String inexistentFilePath = LOGS_FOLDER + "a/b"; + final String inexistentFilePath = LOGS_DIRECTORY + "a/b"; try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(inexistentFilePath); @@ -101,7 +117,7 @@ public void testSetLogLevelNull() { @Test public void testSetLogFileAppendsToFile() throws IOException { - final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; + final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; @@ -126,7 +142,7 @@ public void testSetLogFileAppendsToFile() throws IOException { @Test public void testLogLevelInfo() throws IOException { - final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelInfo.log"; + final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -145,7 +161,7 @@ public void testLogLevelInfo() throws IOException { @Test public void testLogLevelDebug() throws IOException { - final String logFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDebug.log"; + final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -165,7 +181,7 @@ public void testLogLevelDebug() throws IOException { @Test public void testLogLevelDefault() throws IOException { - final String defaultLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + 
"-testLogLevelDefault.log"; + final String defaultLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; assertFalse(new File(defaultLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -177,8 +193,8 @@ public void testLogLevelDefault() throws IOException { } final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); - final String warningLogFilePath = LOGS_FOLDER + System.currentTimeMillis() + "-testLogLevelDefault.log"; - //assertFalse(new File(warningLogFilePath).exists()); + final String warningLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; + assertFalse(new File(warningLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(warningLogFilePath); From 362c8e538b4a1d94845384ea12c93f9d650a1592 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 11:59:30 +0200 Subject: [PATCH 0422/1255] if only facts are loaded, and reasoner in KB_LOADED state, then materialisation status is COMPLETE --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 6 +++--- .../core/reasoner/implementation/ReasonerStateTest.java | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 1eaf10614..d8372e42a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -356,8 +356,8 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; - // TODO: if there are no rules, then materialisation state is complete - this.materialisationState = MaterialisationState.INCOMPLETE; + // if there are no rules, then 
materialisation state is complete + this.materialisationState = rules.isEmpty()? MaterialisationState.COMPLETE: MaterialisationState.INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -383,7 +383,7 @@ String getDataSourcesConfigurationString() { int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, Formatter formatter) { if (dataSource != null) { - String configString = dataSource.toConfigString(); + final String configString = dataSource.toConfigString(); if (configString != null) { formatter.format(dataSource.toConfigString(), dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index e03a0d4aa..a8eb3c923 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -92,7 +92,7 @@ public void testAddFactsAndQuery() throws IOException { Arrays.asList(Collections.singletonList(c))); try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ - assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); assertEquals(expectedAnswersC, queryAnswersC); @@ -108,9 +108,8 @@ public void testAddFactsAndQuery() throws IOException { reasoner.load(); - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ - assertEquals(MaterialisationState.INCOMPLETE, queryResult.getMaterialisationState()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); final Set> 
queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); From 2af6c186a7e8ff815e3d03ababb27c2f8569adec Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 12:06:58 +0200 Subject: [PATCH 0423/1255] make sure KB statements are unmodifyiable --- .../semanticweb/vlog4j/core/reasoner/KnowledgeBase.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 64c6a955d..6517a97ca 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -286,17 +286,18 @@ void addFact(Fact fact) { * Returns all {@link Statement}s of this knowledge base. * * The result can be iterated over and will return statements in the original - * order. + * order. The collection is read-only and cannot be modified to add or delete + * statements. 
* * @return a collection of statements */ public Collection getStatements() { - return this.statements; + return Collections.unmodifiableCollection(this.statements); } @Override public Iterator iterator() { - return this.statements.iterator(); + return Collections.unmodifiableCollection(this.statements).iterator(); } } From bd97a30c6badf1dee115e2741d36f1702b7fac89 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 12:18:48 +0200 Subject: [PATCH 0424/1255] made fields private in KnowledgeBase --- .../vlog4j/core/reasoner/KnowledgeBase.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 6517a97ca..7e716479b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -62,7 +62,7 @@ public class KnowledgeBase implements Iterable { * @author Markus Kroetzsch * */ - class AddStatementVisitor implements StatementVisitor { + private class AddStatementVisitor implements StatementVisitor { @Override public Boolean visit(Fact statement) { addFact(statement); @@ -81,9 +81,9 @@ public Boolean visit(DataSourceDeclaration statement) { } } - final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); - class ExtractStatementsVisitor implements StatementVisitor { + private class ExtractStatementsVisitor implements StatementVisitor { final ArrayList extracted = new ArrayList<>(); final Class ownType; @@ -127,7 +127,7 @@ public Void visit(DataSourceDeclaration statement) { /** * The primary storage for the contents of the knowledge base. 
*/ - final LinkedHashSet statements = new LinkedHashSet<>(); + private final LinkedHashSet statements = new LinkedHashSet<>(); /** * Known prefixes that can be used to pretty-print the contents of the knowledge @@ -139,13 +139,13 @@ public Void visit(DataSourceDeclaration statement) { /** * Index structure that organises all facts by their predicate. */ - final Map> factsByPredicate = new HashMap<>(); + private final Map> factsByPredicate = new HashMap<>(); /** * Index structure that holds all data source declarations of this knowledge * base. */ - final Set dataSourceDeclarations = new HashSet<>(); + private final Set dataSourceDeclarations = new HashSet<>(); /** * Registers a listener for changes on the knowledge base From fa24e6fb13bdd0893a30da22d01d7014cdc60ccf Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 27 Aug 2019 15:37:14 +0200 Subject: [PATCH 0425/1255] Fixed test to be more robust --- .../semanticweb/vlog4j/core/reasoner/LoggingTest.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 5e7806fed..1455601df 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -117,7 +117,7 @@ public void testSetLogLevelNull() { @Test public void testSetLogFileAppendsToFile() throws IOException { - final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testSetLogFileAppendsToFile.log"; + final String logFilePath = LOGS_DIRECTORY + "-testSetLogFileAppendsToFile.log"; assertFalse(new File(logFilePath).exists()); int countLinesBeforeReset = 0; @@ -142,7 +142,7 @@ public void testSetLogFileAppendsToFile() throws IOException { @Test public void testLogLevelInfo() throws IOException { - final String logFilePath = LOGS_DIRECTORY + 
System.currentTimeMillis() + "-testLogLevelInfo.log"; + final String logFilePath = LOGS_DIRECTORY + "-testLogLevelInfo.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -161,7 +161,7 @@ public void testLogLevelInfo() throws IOException { @Test public void testLogLevelDebug() throws IOException { - final String logFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDebug.log"; + final String logFilePath = LOGS_DIRECTORY + "-testLogLevelDebug.log"; assertFalse(new File(logFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -181,7 +181,7 @@ public void testLogLevelDebug() throws IOException { @Test public void testLogLevelDefault() throws IOException { - final String defaultLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; + final String defaultLogFilePath = LOGS_DIRECTORY + "-testLogLevelDefault.log"; assertFalse(new File(defaultLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { @@ -193,7 +193,7 @@ public void testLogLevelDefault() throws IOException { } final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); - final String warningLogFilePath = LOGS_DIRECTORY + System.currentTimeMillis() + "-testLogLevelDefault.log"; + final String warningLogFilePath = LOGS_DIRECTORY + "-testLogLevelDefault2.log"; assertFalse(new File(warningLogFilePath).exists()); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { From 66ca9e1200370060e5de6d1c17536802d81690b0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 27 Aug 2019 15:37:54 +0200 Subject: [PATCH 0426/1255] Support parsing into existing KB --- .../semanticweb/vlog4j/parser/RuleParser.java | 18 ++++++++++++++++-- .../vlog4j/parser/javacc/JavaCCParserBase.java | 10 +++++++++- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index b91cfd6dc..d27d0ffa5 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -36,13 +36,27 @@ /** * Class to statically access VLog parsing functionality. * - * @FIXME Support parsing from multiple files (into one KB). - * * @author Markus Kroetzsch * */ public class RuleParser { + public static void parseInto(KnowledgeBase knowledgeBase, InputStream stream, String encoding) + throws ParsingException { + JavaCCParser javaCcParser = new JavaCCParser(stream, encoding); + javaCcParser.setKnowledgeBase(knowledgeBase); + doParse(javaCcParser); + } + + public static void parseInto(KnowledgeBase knowledgeBase, InputStream stream) throws ParsingException { + parseInto(knowledgeBase, stream, "UTF-8"); + } + + public static void parseInto(KnowledgeBase knowledgeBase, String input) throws ParsingException { + InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + parseInto(knowledgeBase, inputStream, "UTF-8"); + } + public static KnowledgeBase parse(InputStream stream, String encoding) throws ParsingException { return doParse(new JavaCCParser(stream, encoding)); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 52e144235..427dc9143 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -48,7 +48,7 @@ public class JavaCCParserBase { final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); - final KnowledgeBase knowledgeBase = new KnowledgeBase(); + KnowledgeBase knowledgeBase; /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -81,6 +81,10 @@ public enum FormulaContext { */ BODY } + + public JavaCCParserBase() { + this.knowledgeBase = new KnowledgeBase(); + } Constant createIntegerLiteral(String lexicalForm) { return Expressions.makeConstant(lexicalForm + "^^<" + PrefixDeclarations.XSD_INTEGER + ">"); @@ -211,6 +215,10 @@ void resetVariableSets() { this.headExiVars.clear(); this.headUniVars.clear(); } + + public void setKnowledgeBase(KnowledgeBase knowledgeBase) { + this.knowledgeBase = knowledgeBase; + } public KnowledgeBase getKnowledgeBase() { return knowledgeBase; From 39e95047414df072477aff7dcfaae386ddf0fa2f Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 15:43:08 +0200 Subject: [PATCH 0427/1255] rewrite unit test --- .../implementation/AddDataSourceTest.java | 166 +++++++++++++++--- 1 file changed, 137 insertions(+), 29 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index c3d1ec8b8..37c62d6d4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; /*- * #%L @@ -25,12 +26,12 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Fact; @@ -39,11 +40,20 @@ import 
org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class AddDataSourceTest { - private static final String CSV_FILE_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFacts.csv"; + private static final String CSV_FILE_c1_c2_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFacts.csv"; + + private static final String CSV_FILE_c_d_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"; + + private final Set> csvFile_c1_c2_Content = new HashSet<>(Arrays + .asList(Arrays.asList(Expressions.makeConstant("c1")), Arrays.asList(Expressions.makeConstant("c2")))); + + private final Set> csvFile_c_d_Content = new HashSet<>( + Arrays.asList(Arrays.asList(Expressions.makeConstant("c")), Arrays.asList(Expressions.makeConstant("d"))));; @Test public void testAddDataSourceExistentDataForDifferentPredicates() throws IOException { @@ -52,7 +62,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); final Fact factPredicateQArity1 = Expressions.makeFact("q", Arrays.asList(constantA)); final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(factPredicatePArity2); @@ -63,14 +73,16 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.reason(); - final QueryResultIterator queryResultIteratorL1 = 
reasoner.answerQuery( - Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false); - final Set> queryResultsL1 = QueryResultsUtils.collectQueryResults(queryResultIteratorL1); - - final QueryResultIterator queryResultIteratorP1 = reasoner.answerQuery( - Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false); - final Set> queryResultsP1 = QueryResultsUtils.collectQueryResults(queryResultIteratorP1); - assertEquals(queryResultsL1, queryResultsP1); + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } } } @@ -79,7 +91,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep public void testAddDataSourceBeforeLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); @@ -87,70 +99,166 @@ public void testAddDataSourceBeforeLoading() throws IOException { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); reasoner.load(); + try (final QueryResultIterator 
queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + } } - // TODO rewrite test - @Ignore - @Test(expected = ReasonerStateException.class) + @Test public void testAddDataSourceAfterLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); reasoner.load(); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + } + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { + assertFalse(queryResult.hasNext()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } } } - // TODO 
rewrite test - @Ignore - @Test(expected = ReasonerStateException.class) + @Test public void testAddDataSourceAfterReasoning() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); - reasoner.load(); reasoner.reason(); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { + assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + } + + try (final QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { + assertFalse(queryResult.hasNext()); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } } } // FIXME decide how to handle datasources with multiple predicates @Ignore - // TODO move to a test class for KnowledgeBase - @Test(expected = IllegalArgumentException.class) + @Test public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource2 = new CsvFileDataSource(new File(CSV_FILE_c_d_PATH)); final KnowledgeBase kb = new KnowledgeBase(); - 
kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); - kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource2)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + System.out.println(QueryResultsUtils.collectQueryResults(queryResult)); + } + } } // FIXME decide how to handle datasources with multiple predicates @Ignore - // TODO move to a test class for KnowledgeBase - @Test(expected = IllegalArgumentException.class) + @Test public void testAddDataSourceNoFactsForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), Arrays.asList(Expressions.makeConstant("a"))); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(fact); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + QueryResultsUtils.collectQueryResults(queryResult); + } + } + } + + @Test + public void testAddMultipleDataSourcesForPredicate() throws IOException { + final Predicate predicate = Expressions.makePredicate("p", 1); + final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource2 = new CsvFileDataSource( + new File(FileDataSourceTestUtils.INPUT_FOLDER + 
"unaryFactsCD.csv")); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource2)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); + expectedAnswers.addAll(csvFile_c_d_Content); + + assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + + } + } + } + + @Test + public void testAddDataSourceAndFactsForPredicate() throws IOException { + final Predicate predicate = Expressions.makePredicate("p", 1); + final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), + Arrays.asList(Expressions.makeConstant("a"))); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(fact); + kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + try (QueryResultIterator queryResult = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeVariable("x")), true)) { + final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); + expectedAnswers.add(Arrays.asList(Expressions.makeConstant("a"))); + + assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + } + } } } From 48027d59ea97a6a48c6e483b4796d1288cd009bd Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 17:11:12 +0200 Subject: [PATCH 0428/1255] make 
Reasoner#load() package protected and update examples; This is because currently we allow multiple datasources for the same predicate --- .../vlog4j/core/reasoner/Reasoner.java | 10 ---- .../reasoner/implementation/VLogReasoner.java | 4 +- .../vlog4j/core/reasoner/LoggingTest.java | 10 ---- .../core/reasoner/ReasonerTimeoutTest.java | 10 ---- .../implementation/AnswerQueryTest.java | 8 +-- .../GeneratedAnonymousIndividualsTest.java | 6 --- .../implementation/ReasonerStateTest.java | 50 ++++++++----------- .../examples/CompareWikidataDBpedia.java | 17 +++---- .../vlog4j/examples/CountingTriangles.java | 5 +- .../vlog4j/examples/DoidExample.java | 5 +- .../vlog4j/examples/ExamplesUtils.java | 36 +++++++------ .../InMemoryGraphAnalysisExample.java | 13 +++-- .../examples/SimpleReasoningExample.java | 2 - .../examples/core/AddDataFromCsvFile.java | 26 ++++++++-- .../examples/core/AddDataFromRdfFile.java | 11 ++-- .../core/AddDataFromSparqlQueryResults.java | 9 ++-- .../core/ConfigureReasonerLogging.java | 3 -- .../SkolemVsRestrictedChaseTermination.java | 46 +++++++++-------- .../examples/graal/AddDataFromDlgpFile.java | 25 +++++++--- .../examples/graal/AddDataFromGraal.java | 28 +++++++++-- .../examples/graal/DoidExampleGraal.java | 2 - .../owlapi/OwlOntologyToRulesAndFacts.java | 8 +-- .../examples/rdf/AddDataFromRdfModel.java | 1 - .../vlog4j/rdf/TestReasonOverRdfFacts.java | 4 +- 24 files changed, 166 insertions(+), 173 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index d8872e987..35b18d3cc 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -188,16 +188,6 @@ public static Reasoner getInstance() { */ void setLogFile(String filePath); - /** - * Loads the knowledge base, consisting of the current rules and 
facts, - * into the reasoner (if it has not been loaded yet). After loading, the - * reasoner is ready for reasoning and querying. - * - * @throws IOException if an I/O error occurs related to the resources in the - * knowledge base to be loaded. - */ - void load() throws IOException; - /** * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic, * Cyclic, or cyclicity cannot be determined. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index d8372e42a..4611a8d7e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -308,8 +308,8 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } - @Override - public void load() throws IOException { + //@Override + void load() throws IOException { validateNotClosed(); switch (this.reasonerState) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java index 1455601df..33b9b8c24 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java @@ -86,7 +86,6 @@ public void testSetLogFileNull() throws IOException { reasoner.setLogFile(null); reasoner.setLogLevel(LogLevel.INFO); - reasoner.load(); reasoner.reason(); } // TODO test that logging is redirected to system output @@ -101,7 +100,6 @@ public void testSetLogFileInexistent() throws IOException { assertFalse(new File(inexistentFilePath).exists()); reasoner.setLogLevel(LogLevel.INFO); - reasoner.load(); reasoner.reason(); } // TODO test that logging is redirected to system 
output @@ -124,14 +122,12 @@ public void testSetLogFileAppendsToFile() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogLevel(LogLevel.INFO); reasoner.setLogFile(logFilePath); - reasoner.load(); reasoner.reason(); countLinesBeforeReset = readFile(logFilePath); assertTrue(countLinesBeforeReset > 0); reasoner.resetReasoner(); - reasoner.load(); reasoner.reason(); } final int countLinesAfterReset = readFile(logFilePath); @@ -149,8 +145,6 @@ public void testLogLevelInfo() throws IOException { reasoner.setLogLevel(LogLevel.INFO); reasoner.setLogFile(logFilePath); - reasoner.load(); - reasoner.setLogLevel(LogLevel.INFO); reasoner.reason(); reasoner.setLogLevel(LogLevel.INFO); } @@ -168,8 +162,6 @@ public void testLogLevelDebug() throws IOException { reasoner.setLogLevel(LogLevel.DEBUG); reasoner.setLogFile(logFilePath); - reasoner.load(); - reasoner.setLogLevel(LogLevel.DEBUG); reasoner.reason(); reasoner.setLogLevel(LogLevel.DEBUG); reasoner.close(); @@ -187,7 +179,6 @@ public void testLogLevelDefault() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(defaultLogFilePath); - reasoner.load(); reasoner.reason(); reasoner.close(); } @@ -199,7 +190,6 @@ public void testLogLevelDefault() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.setLogFile(warningLogFilePath); reasoner.setLogLevel(LogLevel.WARNING); - reasoner.load(); reasoner.reason(); reasoner.close(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java index f93ca6b10..586c9c4d7 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java @@ -126,8 +126,6 @@ public void skolem() throws IOException { 
this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - this.reasoner.load(); - assertFalse(this.reasoner.reason()); } @@ -136,8 +134,6 @@ public void restricted() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - this.reasoner.load(); - assertFalse(this.reasoner.reason()); } @@ -145,8 +141,6 @@ public void restricted() throws IOException { public void skolemAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - this.reasoner.load(); - this.reasoner.setReasoningTimeout(timeout); assertFalse(this.reasoner.reason()); @@ -156,8 +150,6 @@ public void skolemAfterLoad() throws IOException { public void restrictedAfterLoad() throws IOException { this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - this.reasoner.load(); - this.reasoner.setReasoningTimeout(timeout); assertFalse(this.reasoner.reason()); @@ -168,7 +160,6 @@ public void resetReasoningTimeoutToNull() throws IOException { this.reasoner.setReasoningTimeout(timeout); this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - this.reasoner.load(); assertFalse(this.reasoner.reason()); this.reasoner.resetReasoner(); @@ -179,7 +170,6 @@ public void resetReasoningTimeoutToNull() throws IOException { this.reasoner.setReasoningTimeout(null); - this.reasoner.load(); assertTrue(this.reasoner.reason()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java index fa8c39a65..d9a0e2913 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java @@ -47,7 +47,6 @@ import org.semanticweb.vlog4j.core.reasoner.Algorithm; import 
org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; public class AnswerQueryTest { @@ -66,9 +65,10 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOExcep @SuppressWarnings("unchecked") final Set> factCCD = Sets.newSet(Arrays.asList(constantC, constantC, constantD)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addStatement(fact); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(fact); + + try (final VLogReasoner reasoner =new VLogReasoner(kb)) { reasoner.load(); final PositiveLiteral queryAtomXYZ = Expressions.makePositiveLiteral(predicate, x, y, z); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index fc21e98f8..8cd91a117 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -80,7 +80,6 @@ public void testBlanksSkolemChaseNoRuleRewrite() throws IOException { reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); - reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); @@ -99,7 +98,6 @@ public void testBlanksSkolemChaseSplitHeadPieces() throws IOException { // P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(this.queryAtom, 
includeBlanksFilePath, true); @@ -115,7 +113,6 @@ public void testBlanksRestrictedChaseNoRuleRewrite() throws IOException { reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); - reasoner.load(); reasoner.reason(); reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true); @@ -135,11 +132,9 @@ public void testBlanksRestrictedChaseSplitHeadPieces() throws IOException { // P(?x,?z)} } reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.load(); reasoner.reason(); // FIXME check this test - // <<<<<<< HEAD // reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, // true); // // expected fact: P(c, _:b) @@ -151,7 +146,6 @@ public void testBlanksRestrictedChaseSplitHeadPieces() throws IOException { // assertEquals(queryResult.get(0), "c"); // } // reasoner.exportQueryAnswersToCsv(this.queryAtom, excludeBlanksFilePath, - // ======= checkTwoDistinctBlanksGenerated(reasoner); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index a8eb3c923..fdebd1fdc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -83,36 +83,35 @@ public void testSetReasoningTimeout() { @Test public void testAddFactsAndQuery() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.getKnowledgeBase().addStatement(factPc); + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatement(factPc); reasoner.load(); - + final PositiveLiteral query = Expressions.makePositiveLiteral(p, x); - final Set> expectedAnswersC = new HashSet<>( - 
Arrays.asList(Collections.singletonList(c))); - - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + final Set> expectedAnswersC = new HashSet<>(Arrays.asList(Collections.singletonList(c))); + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); - + assertEquals(expectedAnswersC, queryAnswersC); } - reasoner.getKnowledgeBase().addStatement(factPd); - - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); } reasoner.load(); - - try(final QueryResultIterator queryResult = reasoner.answerQuery(query, true)){ + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); - + final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); - + final Set> expectedAnswersCD = new HashSet<>( Arrays.asList(Collections.singletonList(c), Collections.singletonList(d))); assertEquals(expectedAnswersCD, queryAnswersD); @@ -120,16 +119,6 @@ public void testAddFactsAndQuery() throws IOException { } } - // FIXME update test - @Ignore - @Test(expected = ReasonerStateException.class) - public void testAddRules1() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.getKnowledgeBase().addStatement(ruleQxPx); - reasoner.load(); - } - } - @Test public void testAddRules2() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -314,18 +303,21 @@ public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { 
@Test public void testSuccessiveCloseAfterLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); reasoner.close(); reasoner.close(); } } - @Test - public void testSuccessiveCloseBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { + @Test(expected=ReasonerStateException.class) + public void testSuccessiveCloseBeforeLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.close(); reasoner.close(); + reasoner.load(); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 93ce773fe..7e1031f42 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -59,17 +59,17 @@ public class CompareWikidataDBpedia { static String sparqlGetWikiIriDBpedia = "?result ?enwikipageHttp . " + "BIND( IRI(CONCAT(\"https\",SUBSTR(str(?enwikipageHttp), 5))) AS ?enwikipage)"; - public static void main(String[] args) throws ParsingException, IOException { + public static void main(final String[] args) throws ParsingException, IOException { ExamplesUtils.configureLogging(); // Wikidata pattern: P69 is "educated at"; Q154804 is "University of Leipzig" - String wikidataSparql = "?result wdt:P69 wd:Q154804 . " + sparqlGetWikiIriWikidata; + final String wikidataSparql = "?result wdt:P69 wd:Q154804 . " + sparqlGetWikiIriWikidata; // DBpedia pattern: - String dbpediaSparql = "?result . " + final String dbpediaSparql = "?result . 
" + sparqlGetWikiIriDBpedia; // Configure the SPARQL data sources and some rules to analyse results: - String rules = "" // + final String rules = "" // + "@prefix wdqs: ." // + "@prefix dbp: ." // + "@source dbpResult(2) : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // @@ -86,19 +86,18 @@ public static void main(String[] args) throws ParsingException, IOException { final KnowledgeBase kb = RuleParser.parse(rules); try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); reasoner.reason(); - int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); - int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); - int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); + final int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); + final int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); + final int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); ExamplesUtils.printOutQueryAnswers("dbpOnly(?X)", reasoner); - + System.out.println("Note: some of these results might still be in Wikidata, due to:\n" + "* recent Wikipedia article renamings that are not updated in DBpedia\n" + "* failure to match Wikipedia URLs due to small differences in character encoding\n"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 3a4fc8cc5..4a7f2a690 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -57,10 +57,9 @@ 
public static void main(final String[] args) throws IOException { try (VLogReasoner reasoner = new VLogReasoner(kb)) { - /* Initialise reasoner and compute inferences */ - reasoner.load(); - System.out.println("Note: Materialisation includes SPARQL query answering."); + + /* Initialise reasoner and compute inferences */ reasoner.reason(); /* Execute queries */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index bfbd3a3d7..19c70a27a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -65,10 +65,9 @@ public static void main(final String[] args) throws IOException { reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); reasoner.setLogLevel(LogLevel.DEBUG); - /* Initialise reasoner and compute inferences */ - reasoner.load(); - System.out.println("Note: Materialisation includes SPARQL query answering."); + + /* Initialise reasoner and compute inferences */ reasoner.reason(); /* Execute some queries */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 929bb1832..4d8c57346 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -33,6 +33,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import 
org.semanticweb.vlog4j.parser.ParsingException; @@ -66,10 +67,10 @@ private ExamplesUtils() { */ public static void configureLogging() { // Create the appender that will write log messages to the console. - ConsoleAppender consoleAppender = new ConsoleAppender(); + final ConsoleAppender consoleAppender = new ConsoleAppender(); // Define the pattern of log messages. // Insert the string "%c{1}:%L" to also show class name and line. - String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: consoleAppender.setThreshold(Level.DEBUG); @@ -89,8 +90,10 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R System.out.println("Answers to query " + queryAtom + " :"); try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { answers.forEachRemaining(answer -> System.out.println(" - " + answer)); - System.out.println(); + + System.out.println("Query answers are: " + answers.getMaterialisationState()); } + System.out.println(); } /** @@ -102,9 +105,9 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R */ public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) { try { - PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); printOutQueryAnswers(query, reasoner); - } catch (ParsingException e) { + } catch (final ParsingException e) { throw new RuntimeException(e.getMessage(), e); } } @@ -119,11 +122,11 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner */ public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { try { - PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + final PositiveLiteral query = 
RuleParser.parsePositiveLiteral(queryString); try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) { return iteratorSize(answers); } - } catch (ParsingException e) { + } catch (final ParsingException e) { throw new RuntimeException(e.getMessage(), e); } } @@ -136,10 +139,11 @@ public static int getQueryAnswerCount(final String queryString, final Reasoner r * @param Iterator to iterate over * @return number of elements in iterator */ - public static int iteratorSize(Iterator iterator) { + public static int iteratorSize(final Iterator iterator) { int size = 0; - for (; iterator.hasNext(); ++size) + for (; iterator.hasNext(); ++size) { iterator.next(); + } return size; } @@ -149,10 +153,11 @@ public static int iteratorSize(Iterator iterator) { * @param predicateName for the new predicate * @param arity number of variables */ - private static PositiveLiteral makeQueryAtom(String predicateName, int arity) { + private static PositiveLiteral makeQueryAtom(final String predicateName, final int arity) { final List vars = new ArrayList<>(); - for (int i = 0; i < arity; i++) + for (int i = 0; i < arity; i++) { vars.add(Expressions.makeVariable("x" + i)); + } return Expressions.makePositiveLiteral(predicateName, vars); } @@ -163,11 +168,14 @@ private static PositiveLiteral makeQueryAtom(String predicateName, int arity) { * @param atomName atom's name * @param arity atom's arity */ - public static void exportQueryAnswersToCSV(Reasoner reasoner, String atomName, int arity) + public static void exportQueryAnswersToCSV(final Reasoner reasoner, final String atomName, final int arity) throws ReasonerStateException, IOException { final PositiveLiteral atom = makeQueryAtom(atomName, arity); - String path = ExamplesUtils.OUTPUT_FOLDER + atomName + ".csv"; - reasoner.exportQueryAnswersToCsv(atom, path, true); + final String path = ExamplesUtils.OUTPUT_FOLDER + atomName + ".csv"; + + final MaterialisationState correctness = reasoner.exportQueryAnswersToCsv(atom, 
path, true); + + System.out.println("Query answers are: " + correctness); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 05d0b65ef..9e68b8406 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -45,15 +45,15 @@ */ public class InMemoryGraphAnalysisExample { - public static void main(String[] args) throws ParsingException, IOException { + public static void main(final String[] args) throws ParsingException, IOException { ExamplesUtils.configureLogging(); /* 1. Create a simple random graph */ System.out.println("Generating random graph ..."); - int vertexCount = 10000; - double density = 0.03; + final int vertexCount = 10000; + final double density = 0.03; // initialise data source for storing edges (estimate how many we'll need) - InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); + final InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); int edgeCount = 0; for (int i = 1; i <= vertexCount; i++) { for (int j = 1; j <= vertexCount; j++) { @@ -64,7 +64,7 @@ public static void main(String[] args) throws ParsingException, IOException { } } // also make a unary data source to mark vertices: - InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); + final InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); for (int i = 1; i <= vertexCount; i++) { vertices.addTuple("v" + i); } @@ -85,13 +85,12 @@ public static void main(String[] args) throws ParsingException, IOException { /* 3. 
Use reasoner to compute some query results */ try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); reasoner.reason(); System.out.println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + ExamplesUtils.getQueryAnswerCount("unreachable(?X)", reasoner)); System.out.println("Number of bi-directional triangles: " - + ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6); + + (ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6)); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 81a3362f9..5b5875a63 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -77,8 +77,6 @@ public static void main(final String[] args) throws IOException { } try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - reasoner.reason(); /* Execute some queries */ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index 7af9eb473..b68db527b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -55,9 +55,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.configureLogging(); + final String initialFactsHasPart = ""// a file input: + + "@source hasPart(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ."; + final String rules = "" // first declare file inputs: + "@source bicycle(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + 
"bicycleEDB.csv.gz\") ." - + "@source hasPart(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ." + "@source wheel(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." // every bicycle has some part that is a wheel: + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." @@ -67,20 +69,36 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; - final KnowledgeBase kb = RuleParser.parse(rules); - /* * Loading, reasoning, and querying while using try-with-resources to close the * reasoner automatically. */ + final KnowledgeBase kb = new KnowledgeBase(); try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + /* + * 1. Loading the initial facts with hasPart predicate into reasoner. + */ + RuleParser.parseInto(kb, initialFactsHasPart); + reasoner.reason(); + + /* + * Query initial facts with hasPart predicate. + */ System.out.println("Before materialisation:"); ExamplesUtils.printOutQueryAnswers("hasPart(?X, ?Y)", reasoner); + /* + * 2. Loading further facts and rules into the reasoner, and materialising the + * loaded facts with the rules. + */ + RuleParser.parseInto(kb, rules); /* The reasoner will use the Restricted Chase by default. */ reasoner.reason(); + + /* + * Querying facts with hasPart predicate after materialisation. 
+ */ System.out.println("After materialisation:"); final PositiveLiteral hasPartXY = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); ExamplesUtils.printOutQueryAnswers(hasPartXY, reasoner); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index dd62026ce..a00a008d1 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -79,16 +79,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final KnowledgeBase kb = RuleParser.parse(rules); /* - * 2. Loading, reasoning, querying and exporting, while using try-with-resources - * to close the reasoner automatically. + * 2. reasoning, querying and exporting, while using try-with-resources to close + * the reasoner automatically. */ - try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - - System.out.println("Before materialisation:"); - - ExamplesUtils.printOutQueryAnswers("triple(?X, , ?Y)", reasoner); + try (final Reasoner reasoner = new VLogReasoner(kb)) { /* The reasoner will use the Restricted Chase by default. 
*/ reasoner.reason(); System.out.println("After materialisation:"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java index 40084ccdb..6dd775bb9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java @@ -129,7 +129,7 @@ public static void main(final String[] args) throws IOException { */ kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource)); - reasoner.load(); + reasoner.reason(); /* * We construct a query PositiveLiteral for the predicated associated to the @@ -164,13 +164,10 @@ public static void main(final String[] args) throws IOException { final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query)); /* - * We reset the reasoner in order to add the created rule, and reason on the - * data added from the Wikidata SPARQL query result. + * We add the created rule, and reason on the data added from the Wikidata + * SPARQL query result. */ - reasoner.resetReasoner(); - kb.addStatement(rule); - reasoner.load(); reasoner.reason(); /* We query the reasoner for facts of the haveChildrenTogether predicate. 
*/ diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java index 3ca012638..828fa4339 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java @@ -103,7 +103,6 @@ public static void main(final String[] args) throws IOException { * Default reasoner log level is WARNING. */ reasoner.setLogFile(reasonerWarningLogFilePath); - reasoner.load(); reasoner.reason(); /* @@ -123,7 +122,6 @@ public static void main(final String[] args) throws IOException { */ reasoner.setLogFile(reasonerInfoLogFilePath); - reasoner.load(); reasoner.reason(); reasoner.resetReasoner(); @@ -138,7 +136,6 @@ public static void main(final String[] args) throws IOException { * redirected to System output by default. */ reasoner.setLogFile(reasonerDebugLogFilePath); - reasoner.load(); reasoner.reason(); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index aef1c2df4..80dbaba9e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -46,13 +46,13 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.configureLogging(); - /* 1. Load data and prepare rules. */ - - final String rules = "" // define some facts: + final String facts = ""// define some facts: + "bicycle(bicycle1) ." // + "hasPart(bicycle1, wheel1) ." // + "wheel(wheel1) ." // - + "bicycle(bicycle2) ." 
// + + "bicycle(bicycle2) ."; + + final String rules = "" // every bicycle has some part that is a wheel: + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // // every wheel is part of some bicycle: @@ -61,15 +61,20 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; - final KnowledgeBase kb = RuleParser.parse(rules); + /* + * 1. Load facts into a knowledge base + */ + final KnowledgeBase kb = RuleParser.parse(facts); /* - * 2. Loading, reasoning, and querying. Use try-with resources, or remember to - * call close() to free the reasoner resources. + * 2. Load the knowledge base into the reasoner */ try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + reasoner.reason(); + /* + * 3. Query the reasoner before applying rules for fact materialisation + */ final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); /* See that there is no fact HasPartIDB before reasoning. */ @@ -77,8 +82,13 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); /* - * As the Skolem Chase is known not to terminate for this set of rules and - * facts, it is interrupted after one second. + * 4. Load rules into the knowledge base + */ + RuleParser.parseInto(kb, rules); + /* + * 5. Materialise with the Skolem Chase. As the Skolem Chase is known not to + * terminate for this set of rules and facts, it is interrupted after one + * second. */ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); reasoner.setReasoningTimeout(1); @@ -97,23 +107,15 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + ExamplesUtils.iteratorSize(answers) + " results for hasPart(?X, ?Y)."); /* - * We reset the reasoner and apply the Restricted Chase on the same set of rules - * and facts + * 6. 
We reset the reasoner to discard all inferences, and apply the Restricted + * Chase on the same set of rules and facts */ System.out.println(); reasoner.resetReasoner(); - reasoner.load(); - - /* - * See that there is no fact HasPartIDB before reasoning. All inferred facts - * have been discarded when the reasoner was reset. - */ - System.out.println("We can verify that there are no inferences for hasPart(?X, ?Y) after reset."); - ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); /* - * As the Restricted Chase is known to terminate for this set of rules and - * facts, we will not interrupt it. + * 7. Materialise with the Restricted Chase. As the Restricted Chase is known to + * terminate for this set of rules and facts, we will not interrupt it. */ reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); reasoner.setReasoningTimeout(null); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java index f429ff4a0..07bfafa49 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java @@ -103,19 +103,30 @@ public static void main(final String[] args) throws IOException { try (Reasoner reasoner = Reasoner.getInstance()) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); - for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { - kb.addStatement(graalConjunctiveQueryToRule.getRule()); - } - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); - reasoner.load(); + /* + * Add facts to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + /* + * Load the knowledge base into the 
reasoner + */ + reasoner.reason(); System.out.println("Before materialisation:"); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { ExamplesUtils.printOutQueryAnswers(graalConjunctiveQueryToRule.getQuery(), reasoner); } - /* The reasoner will use the Restricted Chase by default. */ + /* + * Add rules to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { + kb.addStatement(graalConjunctiveQueryToRule.getRule()); + } + /* + * Materialise facts using rules + */ reasoner.reason(); System.out.println("After materialisation:"); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java index 7d595dfad..497acef27 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java @@ -122,17 +122,35 @@ public static void main(final String[] args) throws IOException { * the reasoner automatically. 
*/ final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); - kb.addStatements(convertedGraalConjunctiveQuery.getRule()); - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); try (Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + + /* + * Add facts to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + /* + * Load the knowledge base into the reasoner + */ + reasoner.reason(); + + /* + * Query the loaded facts + */ System.out.println("Before materialisation:"); ExamplesUtils.printOutQueryAnswers(convertedGraalConjunctiveQuery.getQuery(), reasoner); - /* The reasoner will use the Restricted Chase by default. */ + /* + * Add rules to the reasoner knowledge base + */ + kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addStatements(convertedGraalConjunctiveQuery.getRule()); + + /* + * Materialise facts using rules + */ reasoner.reason(); + System.out.println("After materialisation:"); ExamplesUtils.printOutQueryAnswers(convertedGraalConjunctiveQuery.getQuery(), reasoner); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 5d27f95e1..4a7263218 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -129,8 +129,6 @@ public static void main(final String[] args) throws IOException { System.out.println("Rules configured:\n--"); kb.getRules().forEach(System.out::println); System.out.println("--"); - reasoner.load(); - System.out.println("Loading completed."); System.out.println("Starting reasoning (including SPARQL query answering) ..."); reasoner.reason(); System.out.println("... 
reasoning completed."); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 3f14f1556..c85abf354 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -87,10 +87,10 @@ public static void main(final String[] args) throws OWLOntologyCreationException kb.addStatements(owlToRulesConverter.getFacts()); try (VLogReasoner reasoner = new VLogReasoner(kb)) { - /* Load rules and facts obtained from the ontology */ - reasoner.load(); - - /* Reason over loaded ontology with the default algorithm Restricted Chase */ + /* + * Load rules and facts obtained from the ontology, and reason over loaded + * ontology with the default algorithm Restricted Chase + */ System.out.println("Reasoning default algorithm: " + reasoner.getAlgorithm()); reasoner.reason(); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java index b3839f86a..7f8dc3024 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java @@ -132,7 +132,6 @@ public static void main(final String[] args) kb.addStatements(tripleFactsISWC2017); try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); reasoner.reason(); /* We query for persons whose organization name is "TU Dresden" . 
*/ diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java index e1c32fc63..0bd2e7d25 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java @@ -72,7 +72,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl kb.addStatements(facts); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + reasoner.reason(); final PositiveLiteral universalQuery = makePositiveLiteral(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(subject, predicate, object)); @@ -91,7 +91,7 @@ public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandle kb.addStatements(facts); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); + reasoner.reason(); final Constant inventionPredicate = makeConstant("https://example.org/invention"); final Constant carlBenzSubject = makeConstant("https://example.org/Carl-Benz"); From 14b6e138667c1c7f4407ed11717a679a911a78df Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 17:28:33 +0200 Subject: [PATCH 0429/1255] fix log messages --- .../reasoner/implementation/VLogReasoner.java | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 4611a8d7e..b2262d9a4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -286,7 +286,7 @@ public Algorithm getAlgorithm() { public void setReasoningTimeout(Integer seconds) { validateNotClosed(); if 
(seconds != null) { - Validate.isTrue(seconds > 0, "Only strictly positive timeout period alowed!", seconds); + Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); } this.timeoutAfterSeconds = seconds; } @@ -433,11 +433,11 @@ void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); } } } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + throw new RuntimeException("Invalid data sources configuration!", e); } } @@ -459,12 +459,12 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource final int dataSourcePredicateArity = this.vLog .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty: ", dataSource, predicate); + LOGGER.warn("Data source {} for predicate {} is empty! 
", dataSource, predicate); } else if (predicate.getArity() != dataSourcePredicateArity) { throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RuntimeException("Inconsistent reasoner state!", e); } } @@ -483,11 +483,11 @@ void loadFacts() { this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug("Loaded direct fact " + vLogPredicateName + Arrays.toString(tuple)); + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); } } } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + throw new RuntimeException("Invalid data sources configuration!", e); } } } @@ -500,11 +500,11 @@ void loadRules() { this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); if (LOGGER.isDebugEnabled()) { for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { - LOGGER.debug("Loaded rule " + rule.toString()); + LOGGER.debug("Loaded rule {}.", rule.toString()); } } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RuntimeException("Inconsistent reasoner state!", e); } } @@ -584,7 +584,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla throw new RuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty."); + + " that does not occur in the knowledge base. 
Answer must be empty!"); return new EmptyQueryResultIterator(MaterialisationState.COMPLETE); } @@ -608,7 +608,7 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, try { this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { throw new IllegalArgumentException(MessageFormat.format( "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); @@ -685,7 +685,7 @@ public boolean isMFC() { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, - "checking rules acyclicity is not allowed before loading!"); + "Checking rules acyclicity is not allowed before loading!"); } CyclicCheckResult checkCyclic; @@ -701,7 +701,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, - "checking rules acyclicity is not allowed before loading!"); + "Checking rules acyclicity is not allowed before loading!"); } CyclicCheckResult checkCyclic; @@ -785,8 +785,8 @@ private void updateReasonerToKnowledgeBaseChanged() { */ void validateNotClosed() throws ReasonerStateException { if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.error("Invalid operation requested on a closed reasoner object."); - throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner."); + LOGGER.error("Invalid operation requested on a closed reasoner object!"); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); } } From 739271cfa471141a3d11ed68fbb951b7804ba2ef Mon Sep 17 00:00:00 2001 From: 
"Irina.Dragoste" Date: Tue, 27 Aug 2019 17:39:51 +0200 Subject: [PATCH 0430/1255] renamed MaterialisationState to Correctness --- ...rialisationState.java => Correctness.java} | 19 +++++------ .../core/reasoner/QueryResultIterator.java | 2 +- .../vlog4j/core/reasoner/Reasoner.java | 2 +- .../EmptyQueryResultIterator.java | 8 ++--- .../VLogQueryResultIterator.java | 8 ++--- .../reasoner/implementation/VLogReasoner.java | 32 +++++++++---------- .../implementation/AddDataSourceTest.java | 22 ++++++------- .../implementation/ReasonerStateTest.java | 8 ++--- .../vlog4j/examples/ExamplesUtils.java | 4 +-- 9 files changed, 53 insertions(+), 52 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/{MaterialisationState.java => Correctness.java} (69%) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java similarity index 69% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java index 3e405eae8..a7dc7917d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/MaterialisationState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java @@ -27,29 +27,30 @@ * @author Irina Dragoste * */ -public enum MaterialisationState { +public enum Correctness { /** * Reasoning has not completed. Query answering yields sound, but possibly * incomplete answers. */ - INCOMPLETE("incomplete"), + SOUND_BUT_INCOMPLETE("sound but incomplete"), /** - * Query answering may give incorrect answers. Re-materialisation - * ({@link Reasoner#reason()}) is required, in order to obtain correct results. + * Query answering may give incorrect (unsound or incomplete) answers. 
+ * Re-materialisation ({@link Reasoner#reason()}) is required, in order to + * obtain correct results. */ - WRONG("wrong"), + INCORRECT("incorrect"), /** - * Reasoning over current knowledge base is complete, and query answering yields - * sound and complete results. + * Reasoning over current knowledge base has completed, and query answering + * yields correct (sound and complete) results. */ - COMPLETE("complete"); + SOUND_AND_COMPLETE("sound and complete"); private final String name; - private MaterialisationState(String name) { + private Correctness(String name) { this.name = name; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java index c2a7ee746..e0ad5217c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java @@ -26,7 +26,7 @@ public interface QueryResultIterator extends Iterator, AutoCloseable { - public MaterialisationState getMaterialisationState(); + public Correctness getMaterialisationState(); public void close(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 35b18d3cc..f55b5e2a1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -375,7 +375,7 @@ public static Reasoner getInstance() { * ({@code csvFilePath)}. 
*/ // TODO update javadoc with return type - MaterialisationState exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) + Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) throws IOException; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java index 268c27371..d83bac081 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -21,7 +21,7 @@ */ import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; /** @@ -32,9 +32,9 @@ */ public class EmptyQueryResultIterator implements QueryResultIterator { - final MaterialisationState materialisationState; + final Correctness materialisationState; - public EmptyQueryResultIterator(MaterialisationState materialisationState) { + public EmptyQueryResultIterator(Correctness materialisationState) { this.materialisationState = materialisationState; } @@ -53,7 +53,7 @@ public QueryResult next() { return null; } - public MaterialisationState getMaterialisationState() { + public Correctness getMaterialisationState() { return this.materialisationState; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java index fe91caae8..648250fd9 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java @@ -21,7 +21,7 @@ */ import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import karmaresearch.vlog.Term; @@ -38,11 +38,11 @@ public class VLogQueryResultIterator implements QueryResultIterator { private final TermQueryResultIterator vLogTermQueryResultIterator; - private final MaterialisationState materialisationState; + private final Correctness materialisationState; // TODO add reasoningState to constructor public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, - final MaterialisationState materialisationState) { + final Correctness materialisationState) { this.vLogTermQueryResultIterator = termQueryResultIterator; this.materialisationState = materialisationState; } @@ -63,7 +63,7 @@ public void close() { this.vLogTermQueryResultIterator.close(); } - public MaterialisationState getMaterialisationState() { + public Correctness getMaterialisationState() { return this.materialisationState; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b2262d9a4..21b8a5706 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -31,10 +31,10 @@ import org.semanticweb.vlog4j.core.model.implementation.VariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import 
org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -244,7 +244,7 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { final Set rules = new HashSet<>(); private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; - private MaterialisationState materialisationState = MaterialisationState.INCOMPLETE; + private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; private LogLevel internalLogLevel = LogLevel.WARNING; private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; @@ -357,7 +357,7 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.materialisationState = rules.isEmpty()? MaterialisationState.COMPLETE: MaterialisationState.INCOMPLETE; + this.correctness = rules.isEmpty()? 
Correctness.SOUND_AND_COMPLETE: Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -558,10 +558,10 @@ private void runChase() { } if (this.reasoningCompleted) { - this.materialisationState = MaterialisationState.COMPLETE; + this.correctness = Correctness.SOUND_AND_COMPLETE; LOGGER.info("Completed materialisation of inferences."); } else { - this.materialisationState = MaterialisationState.INCOMPLETE; + this.correctness = Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); } } @@ -585,15 +585,15 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBla } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. Answer must be empty!"); - return new EmptyQueryResultIterator(MaterialisationState.COMPLETE); + return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); } - logWarningOnMaterialisationState(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.materialisationState); + logWarningOnCorrectness(); + return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); } @Override - public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { @@ -614,14 +614,14 @@ public MaterialisationState exportQueryAnswersToCsv(final PositiveLiteral query, "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } - logWarningOnMaterialisationState(); - return this.materialisationState; + logWarningOnCorrectness(); + return this.correctness; } - private void logWarningOnMaterialisationState() { - if 
(this.materialisationState != MaterialisationState.COMPLETE) { + private void logWarningOnCorrectness() { + if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", - this.materialisationState); + this.correctness); } } @@ -750,7 +750,7 @@ private void updateReasonerToKnowledgeBaseChanged() { || this.reasonerState.equals(ReasonerState.MATERIALISED)) { this.reasonerState = ReasonerState.KB_CHANGED; - this.materialisationState = MaterialisationState.WRONG; + this.correctness = Correctness.INCORRECT; } } @@ -774,7 +774,7 @@ private void updateReasonerToKnowledgeBaseChanged() { // private void updateMaterialisationStateOnStatementsAdded(boolean materialisationInvalidated) { // if (this.reasonerState.equals(ReasonerState.KB_CHANGED) && materialisationInvalidated) { -// this.materialisationState = MaterialisationState.WRONG; +// this.materialisationState = Correctness.WRONG; // } // } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index 37c62d6d4..b387d4b80 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -39,8 +39,8 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; public class AddDataSourceTest { @@ -76,12 +76,12 @@ public void 
testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } @@ -102,12 +102,12 @@ public void testAddDataSourceBeforeLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } @@ -130,13 +130,13 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner 
.answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } } @@ -158,13 +158,13 @@ public void testAddDataSourceAfterReasoning() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } } @@ -231,7 +231,7 @@ public void testAddMultipleDataSourcesForPredicate() throws IOException { expectedAnswers.addAll(csvFile_c_d_Content); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } @@ -256,7 +256,7 @@ 
public void testAddDataSourceAndFactsForPredicate() throws IOException { expectedAnswers.add(Arrays.asList(Expressions.makeConstant("a"))); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); } } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index fdebd1fdc..f16fcd530 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -46,8 +46,8 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; @@ -92,7 +92,7 @@ public void testAddFactsAndQuery() throws IOException { final Set> expectedAnswersC = new HashSet<>(Arrays.asList(Collections.singletonList(c))); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); assertEquals(expectedAnswersC, 
queryAnswersC); @@ -101,14 +101,14 @@ public void testAddFactsAndQuery() throws IOException { reasoner.getKnowledgeBase().addStatement(factPd); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(MaterialisationState.WRONG, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); } reasoner.load(); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(MaterialisationState.COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 4d8c57346..03cc92e2e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -33,7 +33,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.MaterialisationState; +import org.semanticweb.vlog4j.core.reasoner.Correctness; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.parser.ParsingException; @@ -173,7 +173,7 @@ public static void exportQueryAnswersToCSV(final Reasoner reasoner, final String final PositiveLiteral atom = makeQueryAtom(atomName, arity); final String path = ExamplesUtils.OUTPUT_FOLDER + atomName + ".csv"; - final MaterialisationState correctness = 
reasoner.exportQueryAnswersToCsv(atom, path, true); + final Correctness correctness = reasoner.exportQueryAnswersToCsv(atom, path, true); System.out.println("Query answers are: " + correctness); } From 139c4dfc47505cb5f0912273bd507d5db69c10d6 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 18:35:21 +0200 Subject: [PATCH 0431/1255] correct issue with KnowledgeBase events --- .../vlog4j/core/model/api/Statement.java | 2 +- .../vlog4j/core/reasoner/KnowledgeBase.java | 50 ++++++++++--------- .../core/reasoner/KnowledgeBaseListener.java | 47 +++++++++-------- .../reasoner/implementation/VLogReasoner.java | 2 +- 4 files changed, 52 insertions(+), 49 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java index c120c1ef9..464397b18 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java @@ -21,7 +21,7 @@ */ /** - * A statement is any element that a knowledge base can sonsist of, such as a + * A statement is any element that a knowledge base can consist of, such as a * {@link Rule}, {@link Fact}, or {@link DataSourceDeclaration}. 
* * @author Markus Kroetzsch diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 7e716479b..376806aaa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -16,7 +16,6 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -42,7 +41,7 @@ */ /** - * A knowledge base with rules, facts, and declartions for loading data from + * A knowledge base with rules, facts, and declarations for loading data from * further sources. This is a "syntactic" object in that it represents some * information that is not relevant for the semantics of reasoning, but that is * needed to ensure faithful re-serialisation of knowledge bases loaded from @@ -129,12 +128,12 @@ public Void visit(DataSourceDeclaration statement) { */ private final LinkedHashSet statements = new LinkedHashSet<>(); - /** - * Known prefixes that can be used to pretty-print the contents of the knowledge - * base. We try to preserve user-provided prefixes found in files when loading - * data. - */ - PrefixDeclarations prefixDeclarations; +// /** +// * Known prefixes that can be used to pretty-print the contents of the knowledge +// * base. We try to preserve user-provided prefixes found in files when loading +// * data. +// */ +// PrefixDeclarations prefixDeclarations; /** * Index structure that organises all facts by their predicate. 
@@ -172,13 +171,17 @@ public void deleteListener(KnowledgeBaseListener listener) { * @return true, if the knowledge base has changed. * @param statement */ - public boolean addStatement(Statement statement) { - Validate.notNull(statement, "Statement cannot be Null."); - if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { - this.statements.add(statement); + public void addStatement(Statement statement) { + if (doAddStatement(statement)) { - notifyListenersOnStatementAdded(statement); + } + notifyListenersOnStatementAdded(statement); + } + boolean doAddStatement(Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); return true; } return false; @@ -190,16 +193,15 @@ public boolean addStatement(Statement statement) { * @param statements */ public void addStatements(Collection statements) { - final Set addedStatements = new HashSet<>(); + final List addedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (addStatement(statement)) { + if (doAddStatement(statement)) { addedStatements.add(statement); } } notifyListenersOnStatementsAdded(addedStatements); - } /** @@ -208,10 +210,10 @@ public void addStatements(Collection statements) { * @param statements */ public void addStatements(Statement... statements) { - final Set addedStatements = new HashSet<>(); - + final List addedStatements = new ArrayList<>(); + for (final Statement statement : statements) { - if (addStatement(statement)) { + if (doAddStatement(statement)) { addedStatements.add(statement); } } @@ -219,9 +221,11 @@ public void addStatements(Statement... 
statements) { notifyListenersOnStatementsAdded(addedStatements); } - private void notifyListenersOnStatementsAdded(final Set addedStatements) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsAdded(addedStatements); + private void notifyListenersOnStatementsAdded(final List addedStatements) { + if (!addedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } } } @@ -286,7 +290,7 @@ void addFact(Fact fact) { * Returns all {@link Statement}s of this knowledge base. * * The result can be iterated over and will return statements in the original - * order. The collection is read-only and cannot be modified to add or delete + * order. The collection is read-only and cannot be modified to add or delete * statements. * * @return a collection of statements diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java index cae99a5a2..5639b54bc 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -1,33 +1,32 @@ package org.semanticweb.vlog4j.core.reasoner; -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Set; +import java.util.List; import org.semanticweb.vlog4j.core.model.api.Statement; +/** + * Listener to {@link KnowledgeBase} content change events. + * + * @author Irina Dragoste + * + */ public interface KnowledgeBaseListener { - + + /** + * Event triggered whenever a new statement is added to the associated knowledge + * base. + * + * @param statementAdded new statement added to the knowledge base. + */ void onStatementAdded(Statement statementAdded); - - void onStatementsAdded(Set statementsAdded); + + /** + * Event triggered whenever new statements are added to the associated knowledge + * base. + * + * @param statementsAdded a list of new statements that have been added to the + * knowledge base. + */ + void onStatementsAdded(List statementsAdded); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 21b8a5706..07363b22d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -728,7 +728,7 @@ public CyclicityResult checkForCycles() { } @Override - public void onStatementsAdded(Set statementsAdded) { + public void onStatementsAdded(List statementsAdded) { // TODO more elaborate materialisation state handling // updateReasonerStateToKnowledgeBaseChanged(); // updateMaterialisationStateOnStatementsAdded(statementsAddedInvalidateMaterialisation(statementsAdded)); From 5423e5506710df9eb4e8256eebd187e1e37f8156 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 18:38:46 +0200 Subject: [PATCH 0432/1255] KnowledgeBaseListener license header --- .../core/reasoner/KnowledgeBaseListener.java | 20 
+++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java index 5639b54bc..a98ee08d1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.core.reasoner; +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.util.List; import org.semanticweb.vlog4j.core.model.api.Statement; From d557a84a8082d2f140d1f464cdce60485d757359 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 18:58:51 +0200 Subject: [PATCH 0433/1255] change QueryResultIterator api --- .../core/reasoner/QueryResultIterator.java | 24 +++++++++++++++++-- .../EmptyQueryResultIterator.java | 10 ++++---- .../VLogQueryResultIterator.java | 10 ++++---- .../reasoner/implementation/VLogReasoner.java | 23 +++++++++--------- .../implementation/AddDataSourceTest.java | 20 ++++++++-------- .../implementation/ReasonerStateTest.java | 6 ++--- .../vlog4j/examples/ExamplesUtils.java | 2 +- 7 files changed, 58 insertions(+), 37 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java index e0ad5217c..743497b26 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java @@ -24,9 +24,29 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; +/** + * Iterator for {@link QueryResult}s. + * + * @author Irina Dragoste + * + */ public interface QueryResultIterator extends Iterator, AutoCloseable { - public Correctness getMaterialisationState(); - + /** + * Returns the correctness of the query result. + *
    + *
  • If {@link Correctness#SOUND_AND_COMPLETE}, the query results are + * guaranteed to be correct.
  • + *
  • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete.
  • + *
  • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. + *
+ * + * @return query result correctness + */ + public Correctness getCorrectness(); + + @Override public void close(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java index d83bac081..1d1cd1575 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -32,10 +32,10 @@ */ public class EmptyQueryResultIterator implements QueryResultIterator { - final Correctness materialisationState; + final Correctness correctness; - public EmptyQueryResultIterator(Correctness materialisationState) { - this.materialisationState = materialisationState; + public EmptyQueryResultIterator(Correctness correctness) { + this.correctness = correctness; } @Override @@ -53,8 +53,8 @@ public QueryResult next() { return null; } - public Correctness getMaterialisationState() { - return this.materialisationState; + public Correctness getCorrectness() { + return this.correctness; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java index 648250fd9..043aea636 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java @@ -38,13 +38,12 @@ public class VLogQueryResultIterator implements QueryResultIterator { private final TermQueryResultIterator vLogTermQueryResultIterator; - private final Correctness materialisationState; + private final Correctness correctness; - // TODO add reasoningState to 
constructor public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, final Correctness materialisationState) { this.vLogTermQueryResultIterator = termQueryResultIterator; - this.materialisationState = materialisationState; + this.correctness = materialisationState; } @Override @@ -63,8 +62,9 @@ public void close() { this.vLogTermQueryResultIterator.close(); } - public Correctness getMaterialisationState() { - return this.materialisationState; + @Override + public Correctness getCorrectness() { + return this.correctness; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 07363b22d..cd25a8b8f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -74,12 +74,7 @@ /** * Reasoner implementation using the VLog backend. * - * @TODO Due to automatic predicate renaming, it can happen that an EDB - * predicate cannot be queried after loading unless reasoning has already - * been invoked (since the auxiliary rule that imports the EDB facts to - * the "real" predicate must be used). This issue could be weakened by - * rewriting queries to (single-source) EDB predicates internally when in - * such a state, + * * * @author Markus Kroetzsch * @@ -308,8 +303,15 @@ public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } - //@Override - void load() throws IOException { + /* + * TODO Due to automatic predicate renaming, it can happen that an EDB predicate + * cannot be queried after loading unless reasoning has already been invoked + * (since the auxiliary rule that imports the EDB facts to the "real" predicate + * must be used). 
This issue could be weakened by rewriting queries to + * (single-source) EDB predicates internally when in such a state, + */ + // @Override + void load() throws IOException { validateNotClosed(); switch (this.reasonerState) { @@ -357,7 +359,7 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty()? Correctness.SOUND_AND_COMPLETE: Correctness.SOUND_BUT_INCOMPLETE; + this.correctness = rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -620,8 +622,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { - LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", - this.correctness); + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index b387d4b80..720b80bf1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -76,12 +76,12 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + 
assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateParity1, Expressions.makeVariable("x")), false)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } @@ -102,12 +102,12 @@ public void testAddDataSourceBeforeLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } @@ -130,13 +130,13 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, 
Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } } @@ -158,13 +158,13 @@ public void testAddDataSourceAfterReasoning() throws IOException { try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } } @@ -231,7 +231,7 @@ public void testAddMultipleDataSourcesForPredicate() throws IOException { expectedAnswers.addAll(csvFile_c_d_Content); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } @@ -256,7 +256,7 @@ public void testAddDataSourceAndFactsForPredicate() throws IOException { expectedAnswers.add(Arrays.asList(Expressions.makeConstant("a"))); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } } } diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java index f16fcd530..d81a714a1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java @@ -92,7 +92,7 @@ public void testAddFactsAndQuery() throws IOException { final Set> expectedAnswersC = new HashSet<>(Arrays.asList(Collections.singletonList(c))); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); final Set> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); assertEquals(expectedAnswersC, queryAnswersC); @@ -101,14 +101,14 @@ public void testAddFactsAndQuery() throws IOException { reasoner.getKnowledgeBase().addStatement(factPd); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(Correctness.INCORRECT, queryResult.getMaterialisationState()); + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); } reasoner.load(); try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getMaterialisationState()); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); final Set> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 03cc92e2e..55dbbf354 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -91,7 +91,7 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { answers.forEachRemaining(answer -> System.out.println(" - " + answer)); - System.out.println("Query answers are: " + answers.getMaterialisationState()); + System.out.println("Query answers are: " + answers.getCorrectness()); } System.out.println(); } From 61ecc40d3caf1e5fa0181d3b8f1bf5c9e911eb81 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 19:08:22 +0200 Subject: [PATCH 0434/1255] update Reasoner interface level javaodc --- .../vlog4j/core/reasoner/Reasoner.java | 38 ++++++------------- 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index f55b5e2a1..61b905664 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -2,9 +2,9 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.TermType; @@ -40,25 +40,19 @@ * reasoning.
* Facts can be added to the knowledge base: *
    - *
  • as in-memory Java objects ({@link #addFacts(Atom...)}
  • - *
  • from a persistent data source - * ({@link #addFactsFromDataSource(Predicate, DataSource)})
  • + *
  • as in-memory Java objects ({@link Fact})
  • + *
  • from a persistent data source ({@link DataSourceDeclaration})
  • *
- * Note that facts with the same predicate cannot come from multiple sources - * (where a source can be a collection of in-memory {@link Atom} objects, or a - * {@link DataSource} .
- * Rules added to the knowledge base ({@link #addRules(Rule...)}) can be - * re-written internally by VLog, using the corresponding set - * {@link RuleRewriteStrategy}.
*
- * Once adding facts and rules to the knowledge base has been completed, the - * knowledge base can be loaded into the reasoner. + * Rules added to the knowledge base can be re-written internally by + * VLog, using the corresponding set {@link RuleRewriteStrategy}.
+ *
* - * The loaded reasoner can perform atomic queries on explicit facts - * before reasoning, and all implicit and explicit facts after calling - * {@link Reasoner#reason()}. Queries can provide an iterator for the results - * ({@link #answerQuery(Atom, boolean)}, or the results can be exported to a - * file ({@link #exportQueryAnswersToCsv(Atom, String, boolean)}).
+ * The loaded reasoner can perform atomic queries on explicit and + * implicit facts after calling {@link Reasoner#reason()}. Queries can provide + * an iterator for the results ({@link #answerQuery(Atom, boolean)}, or the + * results can be exported to a file + * ({@link #exportQueryAnswersToCsv(Atom, String, boolean)}).
*
* Reasoning with various {@link Algorithm}s is supported, that can lead * to different sets of inferred facts and different termination behavior. In @@ -67,14 +61,6 @@ * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it * leads to termination in more cases. To avoid non-termination, a reasoning * timeout can be set ({@link Reasoner#setReasoningTimeout(Integer)}).
- * Incremental reasoning is not supported. To add more facts and rule to - * the knowledge base and reason again, the reasoner needs to be - * reset ({@link #resetReasoner()}) to the state of its knowledge base - * before loading. Then, more information can be added to the knowledge base, - * the reasoner can be loaded again, and querying and reasoning can be - * performed. - * - * @FIXME Update the outdated JavaDoc * * @author Irina Dragoste * From ae01347185e11807dc4b2c9d6009505ed5c6d9ff Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 19:17:56 +0200 Subject: [PATCH 0435/1255] update Reasoner javadoc --- .../vlog4j/core/reasoner/Reasoner.java | 31 ++++++------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 61b905664..975e0bb3d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -184,11 +184,11 @@ public static Reasoner getInstance() { /** * Check the Joint Acyclicity (JA) property of loaded rules and EDB - * predicates of loaded facts. If a set of rules and EDB predicates is JA, then, + * predicates of loaded facts. If a set of rules and EDB predicates is JA, then, * for the given set of rules and any facts over the given EDB predicates, * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always - * terminate + * terminate. * * @return {@code true}, if the loaded set of rules is Joint Acyclic with * respect to the EDB predicates of loaded facts.
@@ -199,7 +199,7 @@ public static Reasoner getInstance() { /** * Check the Restricted Joint Acyclicity (RJA) property of loaded rules * and EDB predicates of loaded facts. If a set of rules and EDB predicates is - * RJA, then, for the given set of rules and any facts over the given EDB + * RJA, then, for the given set of rules and any facts over the given EDB * predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE Restricted chase} * will always terminate * @@ -211,7 +211,7 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Acyclicity (MFA) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFA, + * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFA, * then, for the given set of rules and any facts over the given EDB predicates, * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always @@ -226,7 +226,7 @@ public static Reasoner getInstance() { /** * Check the Restricted Model-Faithful Acyclicity (RMFA) property of * loaded rules and EDB predicates of loaded facts. If a set of rules and EDB - * predicates is RMFA, then, for the given set of rules and any facts over the + * predicates is RMFA, then, for the given set of rules and any facts over the * given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE * Restricted chase} will always terminate. If a set of rules and EDB predicates * is MFA, then it is also JA. @@ -239,7 +239,7 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Cyclicity (MFC) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFC, + * EDB predicates of loaded facts. 
If a set of rules and EDB predicates is MFC, * then there exists a set of facts over the given EDB predicates for which * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} algorithm is * guaranteed not to terminate for the loaded rules. If a set of rules and EDB @@ -253,8 +253,8 @@ public static Reasoner getInstance() { boolean isMFC(); /** - * Performs reasoning on the loaded knowledge base, depending on the set - * {@link Algorithm}. Reasoning implies extending the set of explicit facts in + * Performs materialisation on the reasoner {@link KnowledgeBase}, depending on the set + * {@link Algorithm}. Materialisation implies extending the set of explicit facts in * the knowledge base with implicit facts inferred by knowledge base rules.
*
* In some cases, reasoning with rules with existentially quantified variables @@ -263,21 +263,10 @@ public static Reasoner getInstance() { * leads to termination in more cases.
* To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
- *
- * Incremental reasoning is not supported. To add more facts and rule to - * the knowledge base and reason again, the reasoner needs to be - * reset ({@link #resetReasoner()}) to the state of its knowledge base - * before loading. Then, more information can be added to the knowledge base, - * the reasoner can be loaded again, and querying and reasoning can be - * performed. - * * @return *
    - *
  • the value returned by the previous {@link Reasoner#reason()} - * call, if successive reasoning is attempted before a - * {@link Reasoner#resetReasoner()}.
  • - *
  • {@code true}, if reasoning reached completion.
  • - *
  • {@code false}, if reasoning has been interrupted before + *
  • {@code true}, if materialisation reached completion.
  • + *
  • {@code false}, if materialisation has been interrupted before * completion.
  • *
* @throws IOException if I/O exceptions occur during reasoning. From 64e8b83fd4d9f82d210a0db9f134e95e50db7070 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 27 Aug 2019 19:26:29 +0200 Subject: [PATCH 0436/1255] update Reasoner javadoc --- .../vlog4j/core/reasoner/Reasoner.java | 53 +++++++++---------- .../reasoner/implementation/VLogReasoner.java | 12 +++-- 2 files changed, 34 insertions(+), 31 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 975e0bb3d..e584fb3e5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -184,11 +184,11 @@ public static Reasoner getInstance() { /** * Check the Joint Acyclicity (JA) property of loaded rules and EDB - * predicates of loaded facts. If a set of rules and EDB predicates is JA, then, - * for the given set of rules and any facts over the given EDB predicates, - * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, - * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always - * terminate. + * predicates of loaded facts. If a set of rules and EDB predicates is + * JA, then, for the given set of rules and any facts over the given EDB + * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, + * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will + * always terminate. * * @return {@code true}, if the loaded set of rules is Joint Acyclic with * respect to the EDB predicates of loaded facts.
@@ -211,11 +211,11 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Acyclicity (MFA) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFA, - * then, for the given set of rules and any facts over the given EDB predicates, - * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, implicitly, - * the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will always - * terminate + * EDB predicates of loaded facts. If a set of rules and EDB predicates is + * MFA, then, for the given set of rules and any facts over the given EDB + * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, + * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will + * always terminate * * @return {@code true}, if the loaded set of rules is Model-Faithful Acyclic * with respect to the EDB predicates of loaded facts.
@@ -226,8 +226,8 @@ public static Reasoner getInstance() { /** * Check the Restricted Model-Faithful Acyclicity (RMFA) property of * loaded rules and EDB predicates of loaded facts. If a set of rules and EDB - * predicates is RMFA, then, for the given set of rules and any facts over the - * given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE + * predicates is RMFA, then, for the given set of rules and any facts + * over the given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE * Restricted chase} will always terminate. If a set of rules and EDB predicates * is MFA, then it is also JA. * @@ -239,12 +239,12 @@ public static Reasoner getInstance() { /** * Check the Model-Faithful Cyclicity (MFC) property of loaded rules and - * EDB predicates of loaded facts. If a set of rules and EDB predicates is MFC, - * then there exists a set of facts over the given EDB predicates for which - * reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} algorithm is - * guaranteed not to terminate for the loaded rules. If a set of rules and EDB - * predicates is RMFA, then it is also RJA. Therefore, if a set or rules and EDB - * predicates is MFC, it is not MFA, nor JA. + * EDB predicates of loaded facts. If a set of rules and EDB predicates is + * MFC, then there exists a set of facts over the given EDB predicates + * for which reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} algorithm + * is guaranteed not to terminate for the loaded rules. If a set of rules and + * EDB predicates is RMFA, then it is also RJA. Therefore, if a set or rules and + * EDB predicates is MFC, it is not MFA, nor JA. * * @return {@code true}, if the loaded set of rules is Model-Faithful Cyclic * with respect to the EDB predicates of loaded facts.
@@ -253,9 +253,10 @@ public static Reasoner getInstance() { boolean isMFC(); /** - * Performs materialisation on the reasoner {@link KnowledgeBase}, depending on the set - * {@link Algorithm}. Materialisation implies extending the set of explicit facts in - * the knowledge base with implicit facts inferred by knowledge base rules.
+ * Performs materialisation on the reasoner {@link KnowledgeBase}, depending on + * the set {@link Algorithm}. Materialisation implies extending the set of + * explicit facts in the knowledge base with implicit facts inferred by + * knowledge base rules.
*
* In some cases, reasoning with rules with existentially quantified variables * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We @@ -263,6 +264,7 @@ public static Reasoner getInstance() { * leads to termination in more cases.
* To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
+ * * @return *
* * - * @param query a {@link PositiveLiteral} representing the query to be - * answered. - * @param includeBlanks if {@code true}, {@link QueryResult}s containing terms - * of type {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain the {@link QueryResult}s with - * terms of type {@link TermType#CONSTANT} (representing - * named individuals). + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of + * type {@link TermType#NAMED_NULL} (representing anonymous + * individuals introduced to satisfy rule existentially + * quantified variables) will be included. Otherwise, the + * answers will only contain the {@link QueryResult}s with + * terms of type {@link TermType#CONSTANT} (representing + * named individuals). * @return QueryResultIterator that iterates over distinct answers to the query. * It also contains the {@link Correctness} of the query answers. */ - QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks); + QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** @@ -341,24 +341,24 @@ public static Reasoner getInstance() { * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer * fact at the same term position, and the {@link TermType#VARIABLE} terms of * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The same - * variable name identifies the same term in the answer fact.
+ * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The + * same variable name identifies the same term in the answer fact.
* A query can have multiple, distinct query answers. Each answers is written on * a separate line in the given file. * - * @param query a {@link PositiveLiteral} representing the query to be - * answered. - * @param csvFilePath path to a .csv file where the query - * answers will be written. Each line of the - * .csv file represents a query answer, and - * it will contain the fact term names as columns. - * @param includeBlanks if {@code true}, answers containing terms of type - * {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain those with terms of type - * {@link TermType#CONSTANT} (representing named - * individuals). + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param csvFilePath path to a .csv file where the query answers + * will be written. Each line of the .csv file + * represents a query answer, and it will contain the fact + * term names as columns. + * @param includeNulls if {@code true}, answers containing terms of type + * {@link TermType#NAMED_NULL} (representing anonymous + * individuals introduced to satisfy rule existentially + * quantified variables) will be included. Otherwise, the + * answers will only contain those with terms of type + * {@link TermType#CONSTANT} (representing named + * individuals). * * @throws IOException if an I/O error occurs regarding given file * ({@code csvFilePath)}. 
@@ -389,7 +389,7 @@ public static Reasoner getInstance() { * * */ - Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeBlanks) + Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeNulls) throws IOException; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java index ca892cc35..400943c75 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java @@ -90,8 +90,6 @@ public static String getVLogNameForConstant(Constant constant) { * Converts the string representation of a constant in VLog4j directly to the * name of a constant in VLog, without parsing it into a {@link Constant} first. * - * TODO Is this the appropriate location for this code? - * * @param vLog4jConstantName * @return VLog constant string */ @@ -124,7 +122,7 @@ public karmaresearch.vlog.Term visit(ExistentialVariable term) { } /** - * Transforms a Blank to a {@link karmaresearch.vlog.Term} with the same name + * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. 
*/ @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java index aa4759c0c..27fd1a2d5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java @@ -23,6 +23,7 @@ import java.util.ArrayList; import java.util.List; +import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; @@ -81,23 +82,7 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { case CONSTANT: - if (name.charAt(0) == '<' && name.charAt(name.length() - 1) == '>') { // strip <> off IRIs - return new AbstractConstantImpl(name.substring(1, name.length() - 1)); - } else if (name.charAt(0) == '"') { - if (name.charAt(name.length() - 1) == '>') { - int startTypeIdx = name.lastIndexOf('<', name.length() - 2); - String datatype = name.substring(startTypeIdx + 1, name.length() - 1); - String lexicalValue = name.substring(1, startTypeIdx - 3); - return new DatatypeConstantImpl(lexicalValue, datatype); - } else { - int startTypeIdx = name.lastIndexOf('@', name.length() - 2); - String languageTag = name.substring(startTypeIdx + 1, name.length()); - String string = name.substring(1, startTypeIdx - 1); - return new LanguageStringConstantImpl(string, languageTag); - } - } else { - return new AbstractConstantImpl(name); - } + return toConstant(name); case BLANK: return new NamedNullImpl(name); case VARIABLE: @@ -108,4 +93,31 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { } } + /** + * Creates a {@link Constant} from the given 
VLog constant name. + * + * @param vLogConstantName the string name used by VLog + * @return {@link Constant} object + */ + private static Constant toConstant(String vLogConstantName) { + if (vLogConstantName.charAt(0) == '<' && vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + // strip <> off of IRIs + return new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); + } else if (vLogConstantName.charAt(0) == '"') { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + int startTypeIdx = vLogConstantName.lastIndexOf('<', vLogConstantName.length() - 2); + String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); + String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); + return new DatatypeConstantImpl(lexicalValue, datatype); + } else { + int startTypeIdx = vLogConstantName.lastIndexOf('@', vLogConstantName.length() - 2); + String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); + String string = vLogConstantName.substring(1, startTypeIdx - 1); + return new LanguageStringConstantImpl(string, languageTag); + } + } else { + return new AbstractConstantImpl(vLogConstantName); + } + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index 0b46a1e38..cb787b901 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -50,9 +50,9 @@ public void testGettersLiterals() { final Literal positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); final NegativeLiteral negativeLiteral2 = Expressions.makeNegativeLiteral("p", y, x); final Literal positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); - final Literal negativeLiteral4 = 
Expressions.makePositiveLiteral("q", y, d, z); + final Literal positiveLiteral4 = Expressions.makePositiveLiteral("q", y, d, z); final List literalList = Arrays.asList(positiveLiteral1, negativeLiteral2, positiveLiteral3, - negativeLiteral4); + positiveLiteral4); final Conjunction conjunction = new ConjunctionImpl<>(literalList); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java index 8c3a7d8c1..c358cecdf 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java @@ -46,9 +46,6 @@ public void testGetters() { final PositiveLiteral atomP = Expressions.makePositiveLiteral("p", x, c, d, y); final PositiveLiteral atomQ = Expressions.makePositiveLiteral("q", c, d); -// final Set variables = Sets.newSet(x, y); -// final Set constants = Sets.newSet(c, d); - assertEquals("p", atomP.getPredicate().getName()); assertEquals(atomP.getArguments().size(), atomP.getPredicate().getArity()); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index b84ef8324..3f3ecf02f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -91,7 +91,6 @@ public void testEquals() { final Rule rule4 = new RuleImpl(bodyPositiveLiterals, bodyLiterals); final Rule rule5 = new RuleImpl(bodyPositiveLiterals, bodyLiterals); -// assertNotEquals(rule3, rule1); assertNotEquals(rule4, rule1); assertNotEquals(rule5, rule1); assertFalse(rule1.equals(null)); diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java 
b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java index ad67c6ba7..bec53d7fb 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java @@ -60,7 +60,10 @@ private GraalToVLog4JModelConverter() { * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a * {@link PositiveLiteral VLog4J PositiveLiteral}. * - * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} + * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom + * Graal Atom} + * @param existentialVariables set of variables that are existentially + * quantified * @return A {@link PositiveLiteral VLog4J PositiveLiteral} */ public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core.Atom atom, @@ -122,7 +125,9 @@ public static List convertAtomsToFacts(final List convertAtomSet(final AtomSet atomSet, @@ -237,7 +242,9 @@ public static List convertRules(final List replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); } + return new PositiveLiteralImpl(literal.getPredicate(), + literal.getTerms().map(term -> replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); } /** @@ -208,7 +207,7 @@ void startAxiomConversion() { /** * Processes an OWL class inclusion axiom with the two class expressions as - * give, and adds the resulting rules. The method proceeds by first converting + * given, and adds the resulting rules. The method proceeds by first converting * the superclass, then converting the subclass with the same body and head atom * buffers, and finally creating a rule from the collected body and head. 
The * conversions may lead to auxiliary rules being created during processing, so From 23cc89fc0281e01188cc0209d7f96145217ae4a1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Oct 2019 15:31:03 +0200 Subject: [PATCH 0578/1255] simplified code as per review --- .../owlapi/OwlAxiomToRulesConverter.java | 38 ++++++++++--------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index f15f57abc..7d2294220 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -129,32 +129,34 @@ Variable getFreshExistentialVariable() { * @param converter */ void addRule(final AbstractClassToRuleConverter converter) { - if (converter.isTautology()) { - return; - } - Conjunction headConjunction; - if (converter.head.isFalseOrEmpty()) { - headConjunction = new ConjunctionImpl<>( - Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.mainTerm))); - } else { - headConjunction = new ConjunctionImpl<>(converter.head.getConjuncts()); - } + if (!converter.isTautology()) { + final Conjunction headConjunction = this.constructHeadConjunction(converter); - Conjunction bodyConjunction; - if (converter.body.isTrueOrEmpty()) { - bodyConjunction = new ConjunctionImpl<>( - Arrays.asList(OwlToRulesConversionHelper.getTop(converter.mainTerm))); - if (headConjunction.getVariables().count() == 0) { + if (converter.body.isTrueOrEmpty() && (headConjunction.getVariables().count() == 0)) { for (final PositiveLiteral conjunct : headConjunction.getLiterals()) { this.facts.add(new FactImpl(conjunct.getPredicate(), conjunct.getArguments())); } - return; + } else { + final Conjunction bodyConjunction = this.constructBodyConjunction(converter); + 
this.rules.add(Expressions.makePositiveLiteralsRule(headConjunction, bodyConjunction)); } + } + } + + private Conjunction constructBodyConjunction(final AbstractClassToRuleConverter converter) { + if (converter.body.isTrueOrEmpty()) { + return new ConjunctionImpl<>(Arrays.asList(OwlToRulesConversionHelper.getTop(converter.mainTerm))); } else { - bodyConjunction = new ConjunctionImpl<>(converter.body.getConjuncts()); + return new ConjunctionImpl<>(converter.body.getConjuncts()); } + } - this.rules.add(Expressions.makePositiveLiteralsRule(headConjunction, bodyConjunction)); + private Conjunction constructHeadConjunction(final AbstractClassToRuleConverter converter) { + if (converter.head.isFalseOrEmpty()) { + return new ConjunctionImpl<>(Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.mainTerm))); + } else { + return new ConjunctionImpl<>(converter.head.getConjuncts()); + } } Term replaceTerm(Term term, Term oldTerm, Term newTerm) { From 41258c476776b99c707274c67dc3385535cdd80b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Oct 2019 15:40:41 +0200 Subject: [PATCH 0579/1255] Nicer stream merging as per review --- .../core/model/implementation/ConjunctionImpl.java | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 7c5a604ed..726cefc9a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -55,18 +55,9 @@ public List getLiterals() { return Collections.unmodifiableList(this.literals); } - @SuppressWarnings("resource") @Override public Stream getTerms() { - Stream result = null; - for (final T literal : this.literals) { - if (result == null) { - result = 
literal.getTerms(); - } else { - result = Stream.concat(result, literal.getTerms()); - } - } - return result.distinct(); + return this.literals.stream().flatMap(l -> l.getTerms()).distinct(); } @Override From 091344f69f650436e60d7ad571fcd67ab1a4ec7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Markus=20Kr=C3=B6tzsch?= Date: Fri, 25 Oct 2019 15:56:53 +0200 Subject: [PATCH 0580/1255] Notes on data model changes --- RELEASE-NOTES.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 6a396b8e8..a94bb0447 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,6 +1,23 @@ VLog4j Release Notes ==================== +VLog4j v0.5.0 +------------- + +Breaking changes: +* The data model for rules has been refined and changed: + * Instead of Constant, specific types of constants are used to capture abtract and data values + * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification + * Bland was renamed to NamedNull to avoid confusion with RDF blank nodes + * Methods to access terms now use Java Streams and are unified across syntactic objects + +New features: +* ... 
+ +Other improvements: +* Data model is better aligned with syntax supported by parser + + VLog4j v0.4.0 ------------- From bbfdc960c6f16a5c15744abd9d64b124d24093d2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Oct 2019 17:33:40 +0200 Subject: [PATCH 0581/1255] Quick and dirty fix for #128 --- RELEASE-NOTES.md | 3 +++ .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 7 +++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index a94bb0447..049a8e72e 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -16,6 +16,9 @@ New features: Other improvements: * Data model is better aligned with syntax supported by parser + +Bugfixes: +* Acyclicity checks work again without calling reason() first (issue #128) VLog4j v0.4.0 diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 8c295cdac..d355e17f2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -705,8 +705,11 @@ public boolean isMFC() { private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Checking rules acyclicity is not allowed before loading!"); + try { + load(); + } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } } CyclicCheckResult checkCyclic; From 012a8ca56441c039dcf53bf9402387e9cd828474 Mon Sep 17 00:00:00 2001 From: Ali Elhalawati Date: Sun, 27 Oct 2019 15:21:13 +0100 Subject: [PATCH 0582/1255] Added Serializer Class with toString methods --- 
.../core/model/implementation/Serializer.java | 72 +++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java new file mode 100644 index 000000000..487ffbf1d --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -0,0 +1,72 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; + +/** + * Simple class implementation of various toString methods to ensure the correct + * parsable string output of the different Data models. 
+ * + * @author Ali Elhalawati + * + */ +public class Serializer { + + public Serializer() { + + } + + public static String getConjunctionString(Conjunction conjunction) { + return conjunction.toString(); + } + + public static String getConstantString(Constant constant) { + return constant.toString(); + } + + public static String getPredicateString(Predicate predicate) { + return predicate.toString(); + } + + public static String getVariableString(Variable variable) { + return variable.toString(); + } + + public static String getRuleString(Rule rule) { + return rule.toString() + " ."; + } + + public static String getLiteralString(Literal literal) { + return literal.toString(); + } + + public static String getFactString(Fact fact) { + return fact.toString() + "."; + } + +} From a6a42635d3731520dbc22d83dbc23338a8522242 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 28 Oct 2019 16:08:47 +0100 Subject: [PATCH 0583/1255] fix tostring in Serializer and datamodels --- LICENSE.txt | 402 +++++++++--------- .../implementation/AbstractConstantImpl.java | 2 +- .../implementation/AbstractLiteralImpl.java | 17 +- .../model/implementation/ConjunctionImpl.java | 2 +- .../DataSourceDeclarationImpl.java | 2 +- .../implementation/DatatypeConstantImpl.java | 2 +- .../ExistentialVariableImpl.java | 2 +- .../LanguageStringConstantImpl.java | 2 +- .../model/implementation/NamedNullImpl.java | 2 +- .../core/model/implementation/RuleImpl.java | 2 +- .../core/model/implementation/Serializer.java | 101 +++++ .../implementation/UniversalVariableImpl.java | 2 +- vlog4j-examples/LICENSE.txt | 402 +++++++++--------- vlog4j-graal/LICENSE.txt | 402 +++++++++--------- vlog4j-parser/LICENSE.txt | 402 +++++++++--------- 15 files changed, 915 insertions(+), 829 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java diff --git a/LICENSE.txt b/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,201 
+1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java index 84cccf81f..86e97a061 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java @@ -47,6 +47,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return this.getName(); + return Serializer.getConstantString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java index c5b43cfb1..80bd418e6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java @@ -94,22 +94,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - final StringBuilder stringBuilder = new StringBuilder(""); - if (this.isNegated()) { - stringBuilder.append("~"); - } - stringBuilder.append(this.getPredicate().getName()).append("("); - boolean first = true; - 
for (final Term term : this.getArguments()) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } - stringBuilder.append(term); - } - stringBuilder.append(")"); - return stringBuilder.toString(); + return Serializer.getLiteralString(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 726cefc9a..69df24198 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -95,7 +95,7 @@ public String toString() { } else { stringBuilder.append(", "); } - stringBuilder.append(literal.toString()); + stringBuilder.append(Serializer.getLiteralString((AbstractLiteralImpl) literal)); } return stringBuilder.toString(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index 150475b47..217538610 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -85,7 +85,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return "@source " + this.predicate.toString() + "(" + this.predicate.getArity() + ") : " + return "@source " + Serializer.getPredicateString(this.predicate) + "(" + this.predicate.getArity() + ") : " + this.dataSource.toConfigString() + " ."; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index cb9d1a82f..907758747 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -60,7 +60,7 @@ public String getLexicalValue() { @Override public String toString() { - return this.getName(); + return Serializer.getDatatypeConstantString(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java index ff0d893ca..a570bd615 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return "!" 
+ this.getName(); + return Serializer.getExistentialVarString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index ea99774c2..084143187 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -84,7 +84,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return getName(); + return Serializer.getLanguageConstantString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java index c676bd2f3..d6b1d2e9b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java @@ -48,6 +48,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return "_" + this.getName(); + return Serializer.getNamedNullString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java index 135ded2d6..c83e79a30 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java @@ -105,7 +105,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - return this.head + " :- " + this.body; + return Serializer.getRuleString(this); } @Override diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java new file mode 100644 index 000000000..9402fe467 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -0,0 +1,101 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +import java.util.List; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Variable; + +/** + * Simple class implementation of various toString methods to ensure the correct + * parsable string output of the different Data models. 
+ * + * @author Ali Elhalawati + * + */ +public class Serializer { + + public Serializer() { + + } + + public static String getRuleString(RuleImpl rule) { + return rule.getHead() + " :- " + rule.getBody() + "."; + } + + public static String getLiteralString(AbstractLiteralImpl literal) { + final StringBuilder stringBuilder = new StringBuilder(""); + if (literal.isNegated()) { + stringBuilder.append("~"); + } + stringBuilder.append(literal.getPredicate().getName()).append("("); + boolean first = true; + for (final Term term : literal.getArguments()) { + if (first) { + first = false; + } else { + stringBuilder.append(", "); + } + stringBuilder.append(term); + } + stringBuilder.append(")"); + return stringBuilder.toString(); + } + + public static String getFactString(FactImpl fact) { + return fact.toString() + "."; + } + + public static String getConstantString(AbstractConstantImpl constant) { + return constant.getName(); + } + + public static String getExistentialVarString(ExistentialVariableImpl existentialvariable) { + return "!" + existentialvariable.getName(); + } + + public static String getUniversalVarString(UniversalVariableImpl universalvariable) { + return "?" 
+ universalvariable.getName(); + } + + public static String getDatatypeConstantString(DatatypeConstantImpl datatypeconstant) { + return datatypeconstant.getName(); + } + + public static String getLanguageConstantString(LanguageStringConstantImpl languagestringconstant) { + return languagestringconstant.getName(); + } + + public static String getNamedNullString(NamedNullImpl namednull) { + return "_" + namednull.toString(); + } + public static String getPredicateString(Predicate predicate) { + return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java index eaec2eb74..838eab7f3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return "?" + this.getName(); + return Serializer.getUniversalVarString(this); } } diff --git a/vlog4j-examples/LICENSE.txt b/vlog4j-examples/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-examples/LICENSE.txt +++ b/vlog4j-examples/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-graal/LICENSE.txt b/vlog4j-graal/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-graal/LICENSE.txt +++ b/vlog4j-graal/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-parser/LICENSE.txt b/vlog4j-parser/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-parser/LICENSE.txt +++ b/vlog4j-parser/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. From cc3f1b565a0165118576cf0644451fd7bd865a18 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 28 Oct 2019 16:58:17 +0100 Subject: [PATCH 0584/1255] test trial --- .../vlog4j/core/model/implementation/Serializer.java | 4 ---- .../java/org/semanticweb/vlog4j/core/model/RuleImplTest.java | 2 ++ 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 9402fe467..c4b6fc118 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -40,10 +40,6 @@ */ public class Serializer { - public Serializer() { - - } - public static String getRuleString(RuleImpl rule) { return rule.getHead() + " :- " + rule.getBody() + "."; } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index 3f3ecf02f..be3cee765 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -78,6 +78,7 @@ public void testEquals() { final Rule rule8 = 
Expressions.makePositiveLiteralsRule(headPositiveLiterals, bodyPositiveLiterals); assertEquals(rule1, rule1); + assertEquals(rule1.toString(),"q(?X, !Y) :- p(?X, c), p(?X, ?Z)."); assertEquals(rule2, rule1); assertEquals(rule2.hashCode(), rule1.hashCode()); @@ -95,6 +96,7 @@ public void testEquals() { assertNotEquals(rule5, rule1); assertFalse(rule1.equals(null)); assertFalse(rule1.equals(c)); + } @Test(expected = IllegalArgumentException.class) From f0a149e91b1049cb03a56c43aa3fc4b655e02c75 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 30 Oct 2019 19:16:43 +0100 Subject: [PATCH 0585/1255] update helper classes in client; update unit test for client --- .../client/picocli/PrintQueryResults.java | 31 +-- .../org/vlog4j/client/picocli/SaveModel.java | 39 +++- .../client/picocli/SaveQueryResults.java | 53 +++-- .../vlog4j/client/picocli/VLog4jClient.java | 2 +- .../picocli/VLog4jClientMaterialize.java | 51 +++-- .../client/picocli/PrintQueryResultsTest.java | 78 +++---- .../vlog4j/client/picocli/SaveModelTest.java | 190 ++++++++--------- .../client/picocli/SaveQueryResultsTest.java | 192 ++++++++---------- 8 files changed, 314 insertions(+), 322 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java index dbf8039e8..cd32d547a 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java @@ -1,7 +1,5 @@ package org.vlog4j.client.picocli; -import javax.naming.ConfigurationException; - /*- * #%L * VLog4j Client @@ -32,6 +30,8 @@ */ public class PrintQueryResults { + static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. 
Set only one to true.\n Exiting the program."; + /** * If true, Vlog4jClient will print the size of the query result. Mutually * exclusive with {@code --print-complete-query-result} @@ -50,37 +50,42 @@ public class PrintQueryResults { @Option(names = "--print-complete-query-result", description = "Boolean. If true, Vlog4jClient will print the query result in stdout. False by default.") private boolean complete = false; + public PrintQueryResults() { + } + + public PrintQueryResults(boolean sizeOnly, boolean complete) { + this.sizeOnly = sizeOnly; + this.complete = complete; + } + /** * Check correct configuration of the class. @code{--print-query-result-size} * and @code{--print-query-result} are mutually exclusive. * - * @throws ConfigurationException + * @return @code{true} if configuration is valid. */ - public void validate() throws ConfigurationException { - String error = "@code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true."; - if (sizeOnly && complete) { - throw new ConfigurationException(error); - } + protected boolean isValid() { + return !sizeOnly || !complete; } - public void printConfiguration() { + protected void printConfiguration() { System.out.println(" --print-query-result-size: " + sizeOnly); System.out.println(" --print-complete-query-result: " + complete); } - public boolean isSizeOnly() { + protected boolean isSizeOnly() { return sizeOnly; } - public void setSizeOnly(boolean sizeOnly) { + protected void setSizeOnly(boolean sizeOnly) { this.sizeOnly = sizeOnly; } - public boolean isComplete() { + protected boolean isComplete() { return complete; } - public void setComplete(boolean complete) { + protected void setComplete(boolean complete) { this.complete = complete; } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java index 1ec6d0bac..b74a9b43a 100644 --- 
a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java @@ -22,8 +22,6 @@ import java.io.File; -import javax.naming.ConfigurationException; - import picocli.CommandLine.Option; /** @@ -34,6 +32,9 @@ */ public class SaveModel { + static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required.\nExiting the program."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path.\nExiting the program."; + /** * If true, Vlog4jClient will save the model in {@code --output-model-directory} * @@ -50,25 +51,43 @@ public class SaveModel { @Option(names = "--output-model-directory", description = "Directory to store the model. Used only if --store-model is true. \"model\" by default.") private String outputModelDirectory = "model"; + public SaveModel() { + } + + public SaveModel(boolean saveModel, String outputDir) { + this.saveModel = saveModel; + this.outputModelDirectory = outputDir; + } + /** * Check correct configuration of the class. If @code{--save-model} is true, * then a non-empty @code{--output-model-directory} is required. * - * @throws ConfigurationException + * @return @code{true} if configuration is valid. */ - public void validate() throws ConfigurationException { - String error_message = "If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required."; - if (saveModel && (outputModelDirectory == null || outputModelDirectory.isEmpty())) { - throw new ConfigurationException(error_message); - } + protected boolean isConfigurationValid() { + return !saveModel || (outputModelDirectory != null && !outputModelDirectory.isEmpty()); + } + + /** + * Check that the path to store the model is either non-existing or a directory. 
+ * + * @return @code{true} if conditions are satisfied. + */ + protected boolean isDirectoryValid() { + File file = new File(outputModelDirectory); + return !file.exists() || file.isDirectory(); } /** * Create directory to store the model */ - public void prepare() { + public void mkdir() { if (saveModel) { - new File(outputModelDirectory).mkdirs(); + File file = new File(outputModelDirectory); + if (!file.exists()) { + file.mkdirs(); + } } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java index 0bebbd58b..a0c20f91f 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java @@ -22,8 +22,6 @@ import java.io.File; -import javax.naming.ConfigurationException; - import picocli.CommandLine.Option; /** @@ -34,6 +32,9 @@ */ public class SaveQueryResults { + static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required.\nExiting the program."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path.\nExiting the program."; + /** * If true, Vlog4jClient will save the query result in * {@code --output-query-result-directory} @@ -52,46 +53,66 @@ public class SaveQueryResults { @Option(names = "--output-query-result-directory", description = "Directory to store the model. Used only if --save-query-results is true. \"query-results\" by default.") private String outputQueryResultDirectory = "query-results"; + public SaveQueryResults() { + } + + public SaveQueryResults(boolean saveResults, String outputDir) { + this.saveResults = saveResults; + this.outputQueryResultDirectory = outputDir; + } + /** * Check correct configuration of the class. 
If @code{--save-query-results} is * true, then a non-empty @code{--output-query-result-directory} is required. * - * @throws ConfigurationException + * @return @code{true} if configuration is valid. */ - public void validate() throws ConfigurationException { - String error_message = "If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required."; - if (saveResults && (outputQueryResultDirectory == null || outputQueryResultDirectory.isEmpty())) { - throw new ConfigurationException(error_message); - } + protected boolean isConfigurationValid() { + return !saveResults || (outputQueryResultDirectory != null && !outputQueryResultDirectory.isEmpty()); + } + + /** + * Check that the path to store the query results is either non-existing or a + * directory. + * + * @return @code{true} if conditions are satisfied. + */ + protected boolean isDirectoryValid() { + File file = new File(outputQueryResultDirectory); + return !file.exists() || file.isDirectory(); } /** - * Create directory to store query results + * Create directory to store query results if not present. It assumes that + * configuration and directory are valid. 
*/ - public void prepare() { + protected void mkdir() { if (saveResults) { - new File(outputQueryResultDirectory).mkdirs(); + File file = new File(outputQueryResultDirectory); + if (!file.exists()) { + file.mkdirs(); + } } } - public void printConfiguration() { + protected void printConfiguration() { System.out.println(" --save-query-results: " + saveResults); System.out.println(" --output-query-result-directory: " + outputQueryResultDirectory); } - public boolean isSaveResults() { + protected boolean isSaveResults() { return saveResults; } - public void setSaveResults(boolean saveResults) { + protected void setSaveResults(boolean saveResults) { this.saveResults = saveResults; } - public String getOutputQueryResultDirectory() { + protected String getOutputQueryResultDirectory() { return outputQueryResultDirectory; } - public void setOutputQueryResultDirectory(String outputQueryResultDirectory) { + protected void setOutputQueryResultDirectory(String outputQueryResultDirectory) { this.outputQueryResultDirectory = outputQueryResultDirectory; } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java index c317e5ebe..f35f01fb6 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java @@ -31,7 +31,7 @@ * */ @Command(name = "java -jar VLog4jClient.jar", description = "VLog4jClient: A command line client of VLog4j.", subcommands = { - VLog4jClientMaterialize.class}) + VLog4jClientMaterialize.class }) public class VLog4jClient implements Runnable { public static void main(String[] args) { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index c77e8050c..75973312f 100644 --- 
a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.List; -import javax.naming.ConfigurationException; - import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -54,8 +52,7 @@ public class VLog4jClientMaterialize implements Runnable { private final KnowledgeBase kb = new KnowledgeBase(); private final List queries = new ArrayList<>(); - // TODO add link to rls syntax - @Option(names = "--rule-file", description = "Rule file(s) in rls syntax", required = true) + @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar} syntax", required = true) private List ruleFiles = new ArrayList<>(); // TODO @@ -115,15 +112,22 @@ public void run() { } private void validateConfiguration() { - try { - printQueryResults.validate(); - saveQueryResults.validate(); - // TODO saveModel.validate(); - } catch (ConfigurationException e) { - System.err.println("Configuration Error: " + e.getMessage()); - System.err.println("Exiting the program."); - System.exit(1); + if (!printQueryResults.isValid()) { + printMessageAndExit(PrintQueryResults.configurationErrorMessage); + } + if (!saveQueryResults.isConfigurationValid()) { + printMessageAndExit(SaveQueryResults.configurationErrorMessage); + } + if (saveQueryResults.isSaveResults() && !saveQueryResults.isDirectoryValid()) { + printMessageAndExit(SaveQueryResults.wrongDirectoryErrorMessage); } + // TODO + // if (!saveModel.isConfigurationValid()) { + // printMessageAndExit(SaveModel.configurationErrorMessage); + // } + // if (saveModel.isSaveResults() && !saveModel.isDirectoryValid()) { + // printMessageAndExit(SaveModel.wrongDirectoryErrorMessage); + // } 
} private void configureRules() { @@ -131,13 +135,11 @@ private void configureRules() { try { RuleParser.parseInto(kb, new FileInputStream(ruleFile)); } catch (FileNotFoundException e1) { - System.err.println("File not found: " + ruleFile + ". " + e1.getMessage()); - System.err.println("Exiting the program."); - System.exit(1); + throw new RuntimeException( + "File not found: " + ruleFile + ". " + e1.getMessage() + "\nExiting the program."); } catch (ParsingException e2) { - System.err.println("Failed to parse rule file: " + ruleFile + ". " + e2.getMessage()); - System.err.println("Exiting the program."); - System.exit(1); + throw new RuntimeException( + "Failed to parse rule file: " + ruleFile + ". " + e2.getMessage() + "\nExiting the program."); } } } @@ -171,14 +173,14 @@ private void materialize(Reasoner reasoner) { try { reasoner.reason(); } catch (IOException e) { - System.err.println("Something went wrong. Please check the log file." + e.getMessage()); - System.err.println("Exiting the program."); - System.exit(1); + throw new RuntimeException( + "Something went wrong. Please check the log file." 
+ e.getMessage() + "\nExiting the program."); } } // TODO private void saveModel() {...} + private void answerQueries(Reasoner reasoner) { if (!queries.isEmpty()) { System.out.println("Answering queries ..."); @@ -222,6 +224,7 @@ private void printConfiguration() { } private void doSaveQueryResults(Reasoner reasoner, PositiveLiteral query) { + saveQueryResults.mkdir(); try { reasoner.exportQueryAnswersToCsv(query, queryOputputPath(query), true); } catch (IOException e) { @@ -238,4 +241,10 @@ private void doPrintResults(Reasoner reasoner, PositiveLiteral query) { private String queryOputputPath(PositiveLiteral query) { return saveQueryResults.getOutputQueryResultDirectory() + "/" + query + ".csv"; } + + private void printMessageAndExit(String message) { + System.err.println(message); + System.exit(1); + } + } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java index 65f07f306..686fe7506 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -21,98 +21,84 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import org.junit.Test; import org.vlog4j.client.picocli.PrintQueryResults; -import javax.naming.ConfigurationException; public class PrintQueryResultsTest { String outputConfigurationBase = " --print-query-result-size: %b\n --print-complete-query-result: %b\n"; + private final PrintQueryResults sizeTrueCompleteTrue = new PrintQueryResults(true, true); + private final PrintQueryResults sizeTrueCompleteFalse = new PrintQueryResults(true, false); + private final PrintQueryResults sizeFalseCompleteTrue = new PrintQueryResults(false, true); + private final 
PrintQueryResults sizeFalseCompleteFalse = new PrintQueryResults(false, false); + @Test - public void validate_sizeTrueCompleteFalse_valid() throws ConfigurationException { + public void isValid_sizeTrueCompleteFalse_valid() { // default configuration - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); - prq.setComplete(false); - prq.validate(); + assertTrue(sizeTrueCompleteFalse.isValid()); } @Test - public void validate_sizeFalseCompleteTrue_valid() throws ConfigurationException { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(true); - prq.validate(); + public void isValid_sizeFalseCompleteTrue_valid() { + assertTrue(sizeFalseCompleteTrue.isValid()); } - @Test(expected = ConfigurationException.class) - public void validate_sizeTrueCompleteTrue_notValid() throws ConfigurationException { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); - prq.setComplete(true); - prq.validate(); + @Test + public void isValid_sizeTrueCompleteTrue_notValid() { + assertFalse(sizeTrueCompleteTrue.isValid()); } @Test - public void validate_sizeFalseCompleteFalse_valid() throws ConfigurationException { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(false); - prq.validate(); + public void isValid_sizeFalseCompleteFalse_valid() { + assertTrue(sizeFalseCompleteFalse.isValid()); } @Test - public void printConfiguration_sizeTrueCompleteFalse_valid() { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); - prq.setComplete(false); - assertEquals(String.format(outputConfigurationBase, true, false), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeTrueCompleteFalse() { + assertEquals(String.format(outputConfigurationBase, true, false), + captureOutputPrintConfiguration(sizeTrueCompleteFalse)); } @Test - public void printConfiguration_sizeFalseCompleteTrue_valid() { - PrintQueryResults prq = new 
PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(true); - assertEquals(String.format(outputConfigurationBase, false, true), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeFalseCompleteTrue() { + assertEquals(String.format(outputConfigurationBase, false, true), + captureOutputPrintConfiguration(sizeFalseCompleteTrue)); } @Test - public void printConfiguration_sizeTrueCompleteTrue_notValid() { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(true); - prq.setComplete(true); - assertEquals(String.format(outputConfigurationBase, true, true), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeTrueCompleteTrue() { + assertEquals(String.format(outputConfigurationBase, true, true), + captureOutputPrintConfiguration(sizeTrueCompleteTrue)); } @Test - public void printConfiguration_sizeFalseCompleteFalse_valid() { - PrintQueryResults prq = new PrintQueryResults(); - prq.setSizeOnly(false); - prq.setComplete(false); - assertEquals(String.format(outputConfigurationBase, false, false), captureOutputPrintConfiguration(prq)); + public void printConfiguration_sizeFalseCompleteFalse() { + assertEquals(String.format(outputConfigurationBase, false, false), + captureOutputPrintConfiguration(sizeFalseCompleteFalse)); } @Test public void setSizeOnly_and_isSizeOnly() { PrintQueryResults prq = new PrintQueryResults(); prq.setSizeOnly(false); - assertEquals(false, prq.isSizeOnly()); + assertFalse(prq.isSizeOnly()); prq.setSizeOnly(true); - assertEquals(true, prq.isSizeOnly()); + assertTrue(prq.isSizeOnly()); } @Test public void setComplete_and_isComplete() { PrintQueryResults prq = new PrintQueryResults(); prq.setComplete(false); - assertEquals(false, prq.isComplete()); + assertFalse(prq.isComplete()); prq.setComplete(true); - assertEquals(true, prq.isComplete()); + assertTrue(prq.isComplete()); } private String captureOutputPrintConfiguration(PrintQueryResults prq) { diff --git 
a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java index 1810221cf..53d80e267 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java @@ -2,12 +2,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.File; +import java.io.IOException; import java.io.PrintStream; -import javax.naming.ConfigurationException; +import org.junit.Rule; /*- * #%L @@ -30,172 +33,147 @@ */ import org.junit.Test; +import org.junit.rules.TemporaryFolder; import org.vlog4j.client.picocli.SaveModel; public class SaveModelTest { private final String outputConfigurationBase = " --save-model: %b\n --output-model-directory: %s\n"; - private final String dir = "directory"; - private final String tempDir = "tempDir"; private final String defaultDir = "model"; + private final SaveModel saveTrueDefaultDir = new SaveModel(true, defaultDir); + private final SaveModel saveTrueEmptyDir = new SaveModel(true, ""); + private final SaveModel saveTrueNullDir = new SaveModel(true, null); + private final SaveModel saveFalseDefaultDir = new SaveModel(false, defaultDir); + private final SaveModel saveFalseEmptyDir = new SaveModel(false, ""); + private final SaveModel saveFalseNullDir = new SaveModel(false, null); + + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); + @Test - public void validate_saveModelTrueDefaultDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(defaultDir); - sm.validate(); + public void isConfigurationValid_saveTrueDefaultDir_valid() { + assertTrue(saveTrueDefaultDir.isConfigurationValid()); } @Test - public void 
validate_saveModelTrueValidDir_valid() throws ConfigurationException { - // default configuration - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - sm.validate(); + public void isConfigurationValid_saveTrueEmptyDir_nonValid() { + assertFalse(saveTrueEmptyDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueEmptyDir_notValid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(""); - sm.validate(); + @Test + public void isConfigurationValid_saveTrueNullDir_nonValid() { + assertFalse(saveTrueNullDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueNullDir_notValid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(null); - sm.validate(); + @Test + public void isConfigurationValid_saveFalseDefaultDir_valid() { + assertTrue(saveFalseDefaultDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseDefaultDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(defaultDir); - sm.validate(); + public void isConfigurationValid_saveFalseEmptyDir_valid() { + assertTrue(saveFalseEmptyDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseValidDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - sm.validate(); + public void isConfigurationValid_saveFalseNullDir_valid() { + assertTrue(saveFalseNullDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseEmptyDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(""); - sm.validate(); + public void 
isDirectoryValid_nonExistingDirectory_valid() throws IOException { + File nonExistingDirectory = tempFolder.newFolder("folderPath"); + nonExistingDirectory.delete(); + SaveModel temp = new SaveModel(true, nonExistingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); } @Test - public void validate_saveModelFalseNullDir_valid() throws ConfigurationException { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(null); - sm.validate(); + public void isDirectoryValid_existingDirectory_valid() throws IOException { + File existingDirectory = tempFolder.newFolder("folderPath"); + existingDirectory.mkdir(); + SaveModel temp = new SaveModel(true, existingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueDefaultDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, true, defaultDir), captureOutputPrintConfiguration(sm)); + public void isDirectoryValid_existingFile_nonValid() throws IOException { + File existingFile = tempFolder.newFile("filePath"); + existingFile.createNewFile(); + SaveModel temp = new SaveModel(true, existingFile.getAbsolutePath()); + assertFalse(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueValidDir_valid() { - // default configuration - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(sm)); + public void mkdir_saveTrueNonExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.delete(); + SaveModel temp = new SaveModel(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void 
printConfiguration_saveModelTrueEmptyDir_notValid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(""); - assertEquals(String.format(outputConfigurationBase, true, ""), captureOutputPrintConfiguration(sm)); + public void mkdir_saveTrueExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.mkdirs(); + SaveModel temp = new SaveModel(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void printConfiguration_saveModelTrueNullDir_notValid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(null); - assertEquals(String.format(outputConfigurationBase, true, null), captureOutputPrintConfiguration(sm)); + public void mkdir_saveFalse() throws IOException { + File folder = tempFolder.newFile("validNonExistingFolder"); + folder.delete(); + SaveModel temp = new SaveModel(false, folder.getAbsolutePath()); + temp.mkdir(); + assertFalse(folder.exists()); } @Test - public void printConfiguration_saveModelFalseDefaultDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, false, defaultDir), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveTrueDefaultDir() { + assertEquals(String.format(outputConfigurationBase, true, defaultDir), + captureOutputPrintConfiguration(saveTrueDefaultDir)); } @Test - public void printConfiguration_saveModelFalseValidDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveTrueEmptyDir() { + assertEquals(String.format(outputConfigurationBase, true, ""), + captureOutputPrintConfiguration(saveTrueEmptyDir)); } @Test 
- public void printConfiguration_saveModelFalseEmptyDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(""); - assertEquals(String.format(outputConfigurationBase, false, ""), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveTrueNullDir() { + assertEquals(String.format(outputConfigurationBase, true, null), + captureOutputPrintConfiguration(saveTrueNullDir)); } @Test - public void printConfiguration_saveModelFalseNullDir_valid() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(null); - assertEquals(String.format(outputConfigurationBase, false, null), captureOutputPrintConfiguration(sm)); + public void printConfiguration_saveFalseDefaultDir() { + assertEquals(String.format(outputConfigurationBase, false, defaultDir), + captureOutputPrintConfiguration(saveFalseDefaultDir)); } @Test - public void prepare_saveModelTrueValidDir() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(true); - sm.setOutputModelDirectory(tempDir); - sm.prepare(); - File f = new File(tempDir); - assert (f.exists() && f.isDirectory()); - f.delete(); + public void printConfiguration_saveFalseEmptyDir() { + assertEquals(String.format(outputConfigurationBase, false, ""), + captureOutputPrintConfiguration(saveFalseEmptyDir)); } @Test - public void prepare_saveModelFalseValidDir() { - SaveModel sm = new SaveModel(); - sm.setSaveModel(false); - sm.setOutputModelDirectory(dir); - sm.prepare(); - File f = new File(tempDir); - assertFalse(f.exists()); + public void printConfiguration_saveFalseNullDir() { + assertEquals(String.format(outputConfigurationBase, false, null), + captureOutputPrintConfiguration(saveFalseNullDir)); } @Test public void setSaveModel_and_isSaveModel() { SaveModel sm = new SaveModel(); sm.setSaveModel(true); - assertEquals(true, sm.isSaveModel()); + assertTrue(sm.isSaveModel()); sm.setSaveModel(false); - assertEquals(false, sm.isSaveModel()); + 
assertFalse(sm.isSaveModel()); } @Test @@ -203,10 +181,8 @@ public void setOutputModelDirectory_and_getOutputModelDirectory() { SaveModel sm = new SaveModel(); sm.setOutputModelDirectory(""); assertEquals("", sm.getOutputModelDirectory()); - sm.setOutputModelDirectory(dir); - assertEquals(dir, sm.getOutputModelDirectory()); sm.setOutputModelDirectory(null); - assertEquals(null, sm.getOutputModelDirectory()); + assertNull(sm.getOutputModelDirectory()); } private String captureOutputPrintConfiguration(SaveModel sm) { diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java index 3c8c3d5d7..65988e564 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java @@ -2,12 +2,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.File; +import java.io.IOException; import java.io.PrintStream; -import javax.naming.ConfigurationException; +import org.junit.Rule; /*- * #%L @@ -30,182 +33,155 @@ */ import org.junit.Test; +import org.junit.rules.TemporaryFolder; public class SaveQueryResultsTest { private final String outputConfigurationBase = " --save-query-results: %b\n --output-query-result-directory: %s\n"; - private final String dir = "directory"; - private final String tempDir = "tempDir"; private final String defaultDir = "query-results"; + private final SaveQueryResults saveTrueDefaultDir = new SaveQueryResults(true, defaultDir); + private final SaveQueryResults saveTrueEmptyDir = new SaveQueryResults(true, ""); + private final SaveQueryResults saveTrueNullDir = new SaveQueryResults(true, null); + private final SaveQueryResults saveFalseDefaultDir = new 
SaveQueryResults(false, defaultDir); + private final SaveQueryResults saveFalseEmptyDir = new SaveQueryResults(false, ""); + private final SaveQueryResults saveFalseNullDir = new SaveQueryResults(false, null); + + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); + @Test - public void validate_saveQueryResultsTrueDefaultDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(defaultDir); - srq.validate(); + public void isConfigurationValid_saveTrueDefaultDir_valid() { + assertTrue(saveTrueDefaultDir.isConfigurationValid()); } @Test - public void validate_saveModelTrueValidDir_valid() throws ConfigurationException { - // default configuration - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - srq.validate(); + public void isConfigurationValid_saveTrueEmptyDir_notValid() { + assertFalse(saveTrueEmptyDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueEmptyDir_notValid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(""); - srq.validate(); + @Test + public void isConfigurationValid_saveTrueNullDir_notValid() { + assertFalse(saveTrueNullDir.isConfigurationValid()); } - @Test(expected = ConfigurationException.class) - public void validate_saveModelTrueNullDir_notValid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(null); - srq.validate(); + @Test + public void isConfigurationValid_saveFalseDefaultDir_valid() { + assertTrue(saveFalseDefaultDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseDefaultDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - 
srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(defaultDir); - srq.validate(); + public void isConfigurationValid_saveFalseEmptyDir_valid() { + assertTrue(saveFalseEmptyDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseValidDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - srq.validate(); + public void isConfigurationValid_saveFalseNullDir_valid() { + assertTrue(saveFalseNullDir.isConfigurationValid()); } @Test - public void validate_saveModelFalseEmptyDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(""); - srq.validate(); + public void isDirectoryValid_nonExistingDirectory_valid() throws IOException { + File nonExistingDirectory = tempFolder.newFolder("folderPath"); + nonExistingDirectory.delete(); + SaveQueryResults temp = new SaveQueryResults(true, nonExistingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); } @Test - public void validate_saveModelFalseNullDir_valid() throws ConfigurationException { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(null); - srq.validate(); + public void isDirectoryValid_existingDirectory_valid() throws IOException { + File existingDirectory = tempFolder.newFolder("folderPath"); + existingDirectory.mkdir(); + SaveQueryResults temp = new SaveQueryResults(true, existingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueDefaultDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, true, defaultDir), captureOutputPrintConfiguration(srq)); + public void 
isDirectoryValid_existingFile_nonValid() throws IOException { + File existingFile = tempFolder.newFile("filePath"); + existingFile.createNewFile(); + SaveQueryResults temp = new SaveQueryResults(true, existingFile.getAbsolutePath()); + assertFalse(temp.isDirectoryValid()); } @Test - public void printConfiguration_saveModelTrueValidDir_valid() { - // default configuration - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(srq)); + public void mkdir_saveTrueNonExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.delete(); + SaveQueryResults temp = new SaveQueryResults(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void printConfiguration_saveModelTrueEmptyDir_notValid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(""); - assertEquals(String.format(outputConfigurationBase, true, ""), captureOutputPrintConfiguration(srq)); + public void mkdir_saveTrueExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.mkdirs(); + SaveQueryResults temp = new SaveQueryResults(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); } @Test - public void printConfiguration_saveModelTrueNullDir_notValid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(null); - assertEquals(String.format(outputConfigurationBase, true, null), captureOutputPrintConfiguration(srq)); + public void mkdir_saveFalse() throws IOException { + File folder = tempFolder.newFile("validNonExistingFolder"); + folder.delete(); + SaveQueryResults temp = new 
SaveQueryResults(false, folder.getAbsolutePath()); + temp.mkdir(); + assertFalse(folder.exists()); } @Test - public void printConfiguration_saveModelFalseDefaultDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(defaultDir); - assertEquals(String.format(outputConfigurationBase, false, defaultDir), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveTrueDefaultDir() { + assertEquals(String.format(outputConfigurationBase, true, defaultDir), + captureOutputPrintConfiguration(saveTrueDefaultDir)); } @Test - public void printConfiguration_saveModelFalseValidDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - assertEquals(String.format(outputConfigurationBase, false, dir), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveTrueEmptyDir() { + assertEquals(String.format(outputConfigurationBase, true, ""), + captureOutputPrintConfiguration(saveTrueEmptyDir)); } @Test - public void printConfiguration_saveModelFalseEmptyDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(""); - assertEquals(String.format(outputConfigurationBase, false, ""), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveTrueNullDir() { + assertEquals(String.format(outputConfigurationBase, true, null), + captureOutputPrintConfiguration(saveTrueNullDir)); } @Test - public void printConfiguration_saveModelFalseNullDir_valid() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(null); - assertEquals(String.format(outputConfigurationBase, false, null), captureOutputPrintConfiguration(srq)); + public void printConfiguration_saveFalseDefaultDir() { + assertEquals(String.format(outputConfigurationBase, false, defaultDir), + 
captureOutputPrintConfiguration(saveFalseDefaultDir)); } @Test - public void prepare_saveModelTrueValidDir() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(true); - srq.setOutputQueryResultDirectory(tempDir); - srq.prepare(); - File f = new File(tempDir); - assert (f.exists() && f.isDirectory()); - f.delete(); + public void printConfiguration_saveFalseEmptyDir() { + assertEquals(String.format(outputConfigurationBase, false, ""), + captureOutputPrintConfiguration(saveFalseEmptyDir)); } @Test - public void prepare_saveModelFalseValidDir() { - SaveQueryResults srq = new SaveQueryResults(); - srq.setSaveResults(false); - srq.setOutputQueryResultDirectory(dir); - srq.prepare(); - File f = new File(tempDir); - assertFalse(f.exists()); + public void printConfiguration_saveFalseNullDir() { + assertEquals(String.format(outputConfigurationBase, false, null), + captureOutputPrintConfiguration(saveFalseNullDir)); } @Test public void setSaveResults_and_isSaveResults() { SaveQueryResults srq = new SaveQueryResults(); srq.setSaveResults(true); - assertEquals(true, srq.isSaveResults()); + assertTrue(srq.isSaveResults()); srq.setSaveResults(false); - assertEquals(false, srq.isSaveResults()); + assertFalse(srq.isSaveResults()); } @Test - public void setOutputQueryResultDirectory_and_getOutputModelDirectory() { + public void setOutputQueryResultDirectory_and_getOutputQueryResultsDirectory() { SaveQueryResults srq = new SaveQueryResults(); srq.setOutputQueryResultDirectory(""); assertEquals("", srq.getOutputQueryResultDirectory()); - srq.setOutputQueryResultDirectory(dir); - assertEquals(dir, srq.getOutputQueryResultDirectory()); srq.setOutputQueryResultDirectory(null); - assertEquals(null, srq.getOutputQueryResultDirectory()); + assertNull(srq.getOutputQueryResultDirectory()); } private String captureOutputPrintConfiguration(SaveQueryResults srq) { From 5b269b27d882a9f9b82e86de4e2fb46eec837aa7 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 30 Oct 2019 
22:17:48 +0100 Subject: [PATCH 0586/1255] added some tests and modified Serializer --- .../core/model/implementation/Serializer.java | 15 +++++--- .../vlog4j/core/model/FactTest.java | 9 +++++ .../vlog4j/core/model/RuleImplTest.java | 20 +++++++++-- .../vlog4j/core/model/TermImplTest.java | 36 +++++++++++++++++++ 4 files changed, 73 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index c4b6fc118..091b5e9fe 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -38,7 +38,11 @@ * @author Ali Elhalawati * */ -public class Serializer { +public final class Serializer { + + private Serializer() { + + } public static String getRuleString(RuleImpl rule) { return rule.getHead() + " :- " + rule.getBody() + "."; @@ -83,15 +87,16 @@ public static String getDatatypeConstantString(DatatypeConstantImpl datatypecons return datatypeconstant.getName(); } + public static String getNamedNullString(NamedNullImpl namednull) { + return "_" + namednull.getName(); + } + public static String getLanguageConstantString(LanguageStringConstantImpl languagestringconstant) { return languagestringconstant.getName(); } - public static String getNamedNullString(NamedNullImpl namednull) { - return "_" + namednull.toString(); - } public static String getPredicateString(Predicate predicate) { - return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; + return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index 9301dcff8..e9b29787e 100644 
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -54,4 +54,13 @@ public void factsOnlyContainConstants() { new FactImpl(p, Arrays.asList(x)); } + @Test + public void testtoString() { + final Predicate p = Expressions.makePredicate("p", 2); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); + assertEquals("p(c, d)", f1.toString()); + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index be3cee765..7c05f8b61 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -78,7 +78,6 @@ public void testEquals() { final Rule rule8 = Expressions.makePositiveLiteralsRule(headPositiveLiterals, bodyPositiveLiterals); assertEquals(rule1, rule1); - assertEquals(rule1.toString(),"q(?X, !Y) :- p(?X, c), p(?X, ?Z)."); assertEquals(rule2, rule1); assertEquals(rule2.hashCode(), rule1.hashCode()); @@ -96,7 +95,7 @@ public void testEquals() { assertNotEquals(rule5, rule1); assertFalse(rule1.equals(null)); assertFalse(rule1.equals(c)); - + } @Test(expected = IllegalArgumentException.class) @@ -139,4 +138,21 @@ public void noUnsafeVariables() { Expressions.makeRule(literal1, literal2); } + @Test + public void testtoString() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeExistentialVariable("Y"); + final Variable z = Expressions.makeUniversalVariable("Z"); + final Constant c = Expressions.makeAbstractConstant("c"); + final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral atom2 = 
Expressions.makePositiveLiteral("p", x, z); + final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y); + final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + final Conjunction bodyPositiveLiterals = Expressions.makePositiveConjunction(atom1, atom2); + final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); + + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 0c11dd8d6..1c9e98f1f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -132,6 +132,42 @@ public void namedNullGetterTest() { assertEquals(TermType.NAMED_NULL, n.getType()); } + @Test + public void testabstractConstantImpltoString() { + AbstractConstantImpl c = new AbstractConstantImpl("c"); + assertEquals("c", c.toString()); + } + + @Test + public void testdatatypeConstantImpltoString() { + DatatypeConstantImpl c = new DatatypeConstantImpl("c", "http://example.org/mystring"); + assertEquals("\"c\"^^", c.toString()); + } + + @Test + public void testlanguageStringConstantImpltoString() { + LanguageStringConstantImpl c = new LanguageStringConstantImpl("Test", "en"); + assertEquals("\"Test\"@en", c.toString()); + } + + @Test + public void testuniversalVariabletoString() { + UniversalVariableImpl v = new UniversalVariableImpl("v"); + assertEquals("?v", v.toString()); + } + + @Test + public void testexistentialVariabletoString() { + ExistentialVariableImpl v = new ExistentialVariableImpl("v"); + assertEquals("!v", v.toString()); + } + + @Test + public void testnamedNulltoString() { + NamedNullImpl n = new NamedNullImpl("123"); + 
assertEquals("_123", n.toString()); + } + @Test(expected = NullPointerException.class) public void constantNameNonNullTest() { new AbstractConstantImpl((String) null); From ec4e0a7fcbc505cfea4c3d60248189d3cf150e75 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 31 Oct 2019 13:18:31 +0100 Subject: [PATCH 0587/1255] delete unused class --- .../picocli/VLog4jClientTestAcyclicity.java | 42 ------------------- 1 file changed, 42 deletions(-) delete mode 100644 vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java deleted file mode 100644 index 805811165..000000000 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientTestAcyclicity.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.vlog4j.client.picocli; - -/*- - * #%L - * VLog4j Client - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import picocli.CommandLine.Command; - -@Command(name = "testacyclicity", description = "Test if the rule set satisfies any acyclicity notion") -public class VLog4jClientTestAcyclicity implements Runnable { - -// TODO implement the following method -// @Option(names = "--acyclicity-notion", required = false, description = "Acyclicity notion. 
One of:JA (Joint Acyclicity), RJA (Restricted Joint Acyclicity), RFA (Model-Faithful Acyclicity), RMFA (Restricted Model-Faithful Acyclicity). All by default.") -// String acyclicityNotion; - -// TODO implement the following method -// @Option(names = "--rule-file", description = "Rule file in rls syntax", required = true) -// private String rulePath; - - @Override - public void run() { - System.err.println("Not implemented yet."); - System.err.println("Exiting the program."); - } - -} From 6b8b41d1c30022c47a454f59bead7d77060f0a06 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 31 Oct 2019 13:58:12 +0100 Subject: [PATCH 0588/1255] fixed fact tostring --- .../vlog4j/core/model/implementation/FactImpl.java | 6 ++++++ .../vlog4j/core/model/implementation/Serializer.java | 4 ---- .../java/org/semanticweb/vlog4j/core/model/FactTest.java | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index cb79676f8..b08939577 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -47,5 +47,11 @@ public FactImpl(Predicate predicate, List terms) { public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } + + @Override + public String toString() { + return Serializer.getLiteralString(this)+"."; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 091b5e9fe..01496eb18 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -67,10 +67,6 @@ public static String getLiteralString(AbstractLiteralImpl literal) { return stringBuilder.toString(); } - public static String getFactString(FactImpl fact) { - return fact.toString() + "."; - } - public static String getConstantString(AbstractConstantImpl constant) { return constant.getName(); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index e9b29787e..d2da2f1de 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -60,7 +60,7 @@ public void testtoString() { final Constant c = Expressions.makeAbstractConstant("c"); final Constant d = Expressions.makeAbstractConstant("d"); final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); - assertEquals("p(c, d)", f1.toString()); + assertEquals("p(c, d).", f1.toString()); } } From 30c5a83c67fbda6f7c53ec5b835f5e6ef791edf8 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 31 Oct 2019 14:19:14 +0100 Subject: [PATCH 0589/1255] added tests for conjunction and datasourcedeclaration --- .../vlog4j/core/model/ConjunctionImplTest.java | 15 +++++++++++++++ .../core/model/DataSourceDeclarationTest.java | 13 ++++++++++++- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index cb787b901..4df0c8f68 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -181,4 +181,19 @@ public void negativeLiteralsNoNullElements() { Expressions.makeConjunction(negativeLiteralList); } + @Test + 
public void testconjunctiontoString() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final List positiveLiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, + positiveLiteral3); + final Conjunction conjunction1 = new ConjunctionImpl<>(positiveLiteralList); + assertEquals("p(?X, c), p(?Y, ?X), q(?X, d)", conjunction1.toString()); + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index c718ccd1a..4e35fcf32 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -41,7 +41,6 @@ public void equalityTest() throws MalformedURLException { "?var wdt:P31 wd:Q5 ."); Predicate predicate1 = Expressions.makePredicate("p", 3); DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", "?var wdt:P31 wd:Q5 ."); Predicate predicate2 = Expressions.makePredicate("p", 3); @@ -63,4 +62,16 @@ public void equalityTest() throws MalformedURLException { assertFalse(dataSourceDeclaration1.equals(null)); // written like this for recording coverage properly } + @Test + public void testdataSourceDecalarationtoString() throws MalformedURLException { + DataSource dataSource1 = new 
SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + Predicate predicate1 = Expressions.makePredicate("p", 3); + DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); + DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + Predicate predicate2 = Expressions.makePredicate("p", 3); + DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); + assertEquals(dataSourceDeclaration1.toString(), dataSourceDeclaration2.toString()); + } } From f6f615420ba60158795ee9cdeb3dc6415748b250 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:49:58 +0100 Subject: [PATCH 0590/1255] add methods to to query the number of results in a query --- .../vlog4j/core/reasoner/Reasoner.java | 102 ++++++++++++++++++ .../reasoner/implementation/VLogReasoner.java | 50 +++++++-- 2 files changed, 143 insertions(+), 9 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index f391bbe80..6c274b820 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -330,6 +330,108 @@ public static Reasoner getInstance() { */ QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); + // TODO add examples to query javadoc + /** + * Evaluates an atomic ({@code query}), and returns the number of implicit facts + * loaded into the reasoner and the explicit facts materialised by the reasoner, + * including nulls.
+ * An answer to the query is the terms a fact that matches the {@code query}: + * the fact predicate is the same as the {@code query} predicate, the + * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer + * fact at the same term position, and the {@link TermType#VARIABLE} terms of + * the {@code query} are matched by terms in the fact, either named + * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The + * same variable name identifies the same term in the answer fact.
+ * A query answer is represented by a {@link QueryResult}. A query can have + * multiple, distinct query answers. This method returns an Iterator over these + * answers.
+ * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + * ({@link QueryResultIterator#getCorrectness()}): + *
    + *
  • If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current + * knowledge base has completed, and the query answers are guaranteed to be + * correct.
  • + *
  • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete. This can happen + *
      + *
    • when materialisation has not completed ({@link Reasoner#reason()} returns + * {@code false}),
    • + *
    • or when the knowledge base was modified after reasoning, and the + * materialisation does not reflect the current knowledge base. + * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain + * complete query answers with respect to the current knowledge base.
    • + *
    + *
  • + *
  • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. This can happen when the knowledge base was modified + * and the reasoner materialisation is no longer consistent with the current + * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, + * in order to obtain correct query answers. + *
+ * + * + * @param query a {@link PositiveLiteral} representing the query to be answered. + * @return number of facts in the extension of the query. + */ + long queryAnswerSize(PositiveLiteral query); + + // TODO add examples to query javadoc + /** + * Evaluates an atomic ({@code query}), and returns the number of implicit facts + * loaded into the reasoner and the explicit facts materialised by the reasoner. + *
+ * An answer to the query is the terms a fact that matches the {@code query}: + * the fact predicate is the same as the {@code query} predicate, the + * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer + * fact at the same term position, and the {@link TermType#VARIABLE} terms of + * the {@code query} are matched by terms in the fact, either named + * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The + * same variable name identifies the same term in the answer fact.
+ * A query answer is represented by a {@link QueryResult}. A query can have + * multiple, distinct query answers. This method returns an Iterator over these + * answers.
+ * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + * ({@link QueryResultIterator#getCorrectness()}): + *
    + *
  • If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current + * knowledge base has completed, and the query answers are guaranteed to be + * correct.
  • + *
  • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete. This can happen + *
      + *
    • when materialisation has not completed ({@link Reasoner#reason()} returns + * {@code false}),
    • + *
    • or when the knowledge base was modified after reasoning, and the + * materialisation does not reflect the current knowledge base. + * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain + * complete query answers with respect to the current knowledge base.
    • + *
    + *
  • + *
  • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. This can happen when the knowledge base was modified + * and the reasoner materialisation is no longer consistent with the current + * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, + * in order to obtain correct query answers. + *
+ * + * + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of + * type {@link TermType#NAMED_NULL} (representing anonymous + * individuals introduced to satisfy rule existentially + * quantified variables) will be included. Otherwise, the + * answers will only contain the {@link QueryResult}s with + * terms of type {@link TermType#CONSTANT} (representing + * named individuals). + * @return number of facts in the extension of the query. + */ + long queryAnswerSize(PositiveLiteral query, boolean includeNulls); + // TODO add examples to query javadoc /** * Evaluates an atomic query ({@code query}) on the implicit facts loaded into diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index d355e17f2..33f1f428f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -569,14 +569,14 @@ private void runChase() { } @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); - final boolean filterBlanks = !includeBlanks; + final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); TermQueryResultIterator stringQueryResultIterator; @@ -594,6 +594,33 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean 
includeBla return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); } + @Override + public long queryAnswerSize(PositiveLiteral query) { + return queryAnswerSize(query, true); + } + + @Override + public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + validateNotClosed(); + validateKBLoaded("Querying is not alowed before reasoner is loaded!"); + Validate.notNull(query, "Query atom must not be null!"); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + int result = -1; + try { + result = this.vLog.querySize(vLogAtom, true, filterBlanks); + } catch (NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (NonExistingPredicateException e) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); + return 0; + } + return result; + } + @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { @@ -740,19 +767,18 @@ public void onStatementsAdded(List statementsAdded) { // TODO more elaborate materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementsAdded(statementsAdded); + + // updateCorrectnessOnStatementsAdded(statementsAdded); updateCorrectness(); } - @Override public void onStatementAdded(Statement statementAdded) { // TODO more elaborate materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementAdded(statementAdded); + + // updateCorrectnessOnStatementAdded(statementAdded); updateCorrectness(); } @@ -766,9 +792,9 @@ private void updateReasonerToKnowledgeBaseChanged() { private void updateCorrectness() { if (this.reasonerState == ReasonerState.KB_CHANGED) { - + final boolean noRules = this.knowledgeBase.getRules().isEmpty(); - 
this.correctness = noRules? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; + this.correctness = noRules ? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; } } @@ -784,4 +810,10 @@ void validateNotClosed() throws ReasonerStateException { } } + void validateKBLoaded(String errorMessage) { + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, errorMessage); + } + } + } From 4e058806ab50de6d32b173211834800ca261e33a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:51:24 +0100 Subject: [PATCH 0591/1255] remove helper methods: getQueryAnswerCount, iteratorSize --- .../vlog4j/examples/ExamplesUtils.java | 45 ------------------- 1 file changed, 45 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index a21ef2999..906d3c368 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -112,51 +112,6 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner } } - /** - * Returns the number of answers returned by {@code reasoner} to the query - * ({@code queryAtom}). - * - * @param queryAtom query to be answered - * @param reasoner reasoner to query on - */ - public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { - try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { - return iteratorSize(answers); - } - } - - /** - * Returns the number of answers returned by {@code reasoner} to the query - * ({@code queryAtom}). 
- * - * @param queryAtom query to be answered - * @param reasoner reasoner to query on - */ - public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { - try { - final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); - return getQueryAnswerCount(query, reasoner); - } catch (final ParsingException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - /** - * Returns the size of an iterator. - * - * @FIXME This is an inefficient way of counting results. It should be done at a - * lower level instead - * @param Iterator to iterate over - * @return number of elements in iterator - */ - private static int iteratorSize(final Iterator iterator) { - int size = 0; - for (; iterator.hasNext(); ++size) { - iterator.next(); - } - return size; - } - /** * Creates an Atom with @numberOfVariables distinct variables * From 59a50a9c20c19571f3db5f8ae462a4e020a1704f Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:52:56 +0100 Subject: [PATCH 0592/1255] add test for native querySize --- .../implementation/QuerySizeTest.java | 262 ++++++++++++++++++ 1 file changed, 262 insertions(+) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java new file mode 100644 index 000000000..0c2699f32 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java @@ -0,0 +1,262 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +public class QuerySizeTest { + + private static final Predicate predP = Expressions.makePredicate("P", 1); + private static final Predicate predQ = Expressions.makePredicate("Q", 1); + private static final Predicate predR = Expressions.makePredicate("R", 2); + private static final Variable x = Expressions.makeUniversalVariable("x"); + private static final Variable y = Expressions.makeExistentialVariable("y"); + private static final Constant c = Expressions.makeAbstractConstant("c"); + private static final Constant d = Expressions.makeAbstractConstant("d"); + private static final Constant e = Expressions.makeAbstractConstant("e"); + private static final Constant f = Expressions.makeAbstractConstant("f"); + + private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); + private static final PositiveLiteral Qx = 
Expressions.makePositiveLiteral(predQ, x); + private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); + private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); + private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); + private static final Conjunction conPx = Expressions.makeConjunction(Px); + + private static final Rule ruleQxPx = Expressions.makeRule(Qx, Px); + private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); + + private static final Fact factPc = Expressions.makeFact(predP, c); + private static final Fact factPd = Expressions.makeFact(predP, d); + + private static final Fact factQe = Expressions.makeFact(predQ, e); + private static final Fact factQf = Expressions.makeFact(predQ, f); + + private static final PositiveLiteral Rdy = Expressions.makePositiveLiteral(predR, d, y); + private static final PositiveLiteral Rey = Expressions.makePositiveLiteral(predR, e, y); + private static final PositiveLiteral Rxd = Expressions.makePositiveLiteral(predR, x, d); + private static final PositiveLiteral Rxe = Expressions.makePositiveLiteral(predR, x, e); + + @Test + public void noFactsnoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void noFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(ruleQxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void noFactsExistentialRule() throws 
IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void pFactsNoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + } + } + + @Test + public void pFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, ruleQxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void pFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(0, reasoner.queryAnswerSize(Qx, false)); + assertEquals(2, reasoner.queryAnswerSize(Rxy)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, 
reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void qFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px)); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void qFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.queryAnswerSize(Px)); + assertEquals(0, reasoner.queryAnswerSize(Px, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Qx)); + assertEquals(2, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void pFactsQFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(4, reasoner.queryAnswerSize(Qx)); + assertEquals(4, 
reasoner.queryAnswerSize(Qx, true)); + assertEquals(4, reasoner.queryAnswerSize(Qx, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxy)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + } + } + + @Test + public void pFactsQFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(4, reasoner.queryAnswerSize(Qx)); + assertEquals(4, reasoner.queryAnswerSize(Qx, true)); + assertEquals(2, reasoner.queryAnswerSize(Qx, false)); + assertEquals(2, reasoner.queryAnswerSize(Rxy)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rey, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); + assertEquals(0, reasoner.queryAnswerSize(Rey, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + } + } + + @Test + public void pFactsQFactsExistentialAndUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px)); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(6, reasoner.queryAnswerSize(Qx)); + assertEquals(6, 
reasoner.queryAnswerSize(Qx, true)); + assertEquals(4, reasoner.queryAnswerSize(Qx, false)); + assertEquals(2, reasoner.queryAnswerSize(Rxy)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); + assertEquals(0, reasoner.queryAnswerSize(Rey, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); + assertEquals(0, reasoner.queryAnswerSize(Rey, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + } + } + +} From dde14c285ca50a1129c33c2a5d8a8945211bd701 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 11:53:23 +0100 Subject: [PATCH 0593/1255] update code to use native queriSize --- .../client/picocli/VLog4jClientMaterialize.java | 3 +-- .../vlog4j/examples/CompareWikidataDBpedia.java | 6 +++--- .../vlog4j/examples/CountingTriangles.java | 17 ++++++++++------- .../vlog4j/examples/DoidExample.java | 5 +++-- .../examples/InMemoryGraphAnalysisExample.java | 10 ++++++---- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../vlog4j/examples/graal/DoidExampleGraal.java | 5 +++-- 7 files changed, 27 insertions(+), 21 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index 7758a0065..6f4755f5c 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -227,8 +227,7 @@ private void doSaveQueryResults(Reasoner reasoner, PositiveLiteral query) { } private void doPrintResults(Reasoner reasoner, PositiveLiteral query) { - System.out.println( - "Number of query answers in " + query + 
": " + ExamplesUtils.getQueryAnswerCount(query, reasoner)); + System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query)); } private String queryOputputPath(PositiveLiteral query) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 7e1031f42..4bd0392c4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -88,9 +88,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final int resultCount = ExamplesUtils.getQueryAnswerCount("result(?X)", reasoner); - final int wdCount = ExamplesUtils.getQueryAnswerCount("inWd(?X)", reasoner); - final int dbpCount = ExamplesUtils.getQueryAnswerCount("inDbp(?X)", reasoner); + final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")); + final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")); + final double dbpCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 050671ebb..63e1cb98e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -39,7 +39,7 @@ */ public class CountingTriangles { - public static void main(final 
String[] args) throws IOException { + public static void main(final String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); KnowledgeBase kb; @@ -61,15 +61,18 @@ public static void main(final String[] args) throws IOException { /* Initialise reasoner and compute inferences */ reasoner.reason(); - System.out.print("Found " + ExamplesUtils.getQueryAnswerCount("country(?X)", reasoner) - + " countries in Wikidata"); + final double countries = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")); + final double shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")); + final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + + System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: - System.out.println(", with " + (ExamplesUtils.getQueryAnswerCount("shareBorder(?X,?Y)", reasoner) / 2) - + " pairs of them sharing a border."); + System.out.println(", with " + (shareBorder / 2) + " pairs of them sharing a border."); // Due to symmetry, each triangle is found six times, hence we divide by 6: System.out.println("The number of triangles of countries that mutually border each other was " - + (ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6) + "."); + + (triangles / 6) + "."); } } -} + +} \ No newline at end of file diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index e237dc70c..96f573d87 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -44,7 +44,7 @@ */ public class DoidExample { - public static void main(final String[] args) throws IOException { + public static void main(final 
String[] args) throws IOException, ParsingException { ExamplesUtils.configureLogging(); /* Configure rules */ @@ -72,7 +72,8 @@ public static void main(final String[] args) throws IOException { final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - System.out.println(" " + queryString + ": " + ExamplesUtils.getQueryAnswerCount(queryString, reasoner)); + double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)); + System.out.println(" " + queryString + ": " + querySize); } } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 9e68b8406..87bc9927e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -87,10 +87,12 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - System.out.println("Number of vertices not reachable from vertex 1 by a bi-directional path: " - + ExamplesUtils.getQueryAnswerCount("unreachable(?X)", reasoner)); - System.out.println("Number of bi-directional triangles: " - + (ExamplesUtils.getQueryAnswerCount("triangle(?X,?Y,?Z)", reasoner) / 6)); + final double unreachable = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")); + final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + + System.out + .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); + System.out.println("Number of bi-directional triangles: " + 
(triangles / 6)); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 1e7e5015c..138c58d4b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * restrictions. */ System.out.println("Before the timeout, the Skolem chase had produced " - + ExamplesUtils.getQueryAnswerCount(queryHasPart, reasoner) + " results for hasPart(?X, ?Y)."); + + reasoner.queryAnswerSize(queryHasPart) + " results for hasPart(?X, ?Y)."); /* * 6. We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 74a418a6f..e53b2845a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -129,11 +129,12 @@ public static void main(final String[] args) throws IOException { reasoner.reason(); System.out.println("... 
reasoning completed."); + final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + ExamplesUtils.getQueryAnswerCount("humansWhoDiedOfCancer(?X)", reasoner)); + + reasoner.queryAnswerSize(humansWhoDiedOfCancer)); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + ExamplesUtils.getQueryAnswerCount(humansWhoDiedOfNoncancer, reasoner)); + + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer)); System.out.println("Done."); } From 23dd7d92ec8bd42fe77c502c521f13f5d174d60b Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 4 Nov 2019 16:40:39 +0100 Subject: [PATCH 0594/1255] auto-format code --- .../vlog4j/core/reasoner/KnowledgeBase.java | 21 +++---- .../core/reasoner/KnowledgeBaseListener.java | 19 +++--- .../core/reasoner/KnowledgeBaseTest.java | 6 +- .../implementation/AddDataSourceTest.java | 58 ++++++++++--------- .../QueryAnsweringCorrectnessTest.java | 24 ++++---- .../VLogReasonerCombinedInputs.java | 3 +- .../implementation/VLogReasonerCsvInput.java | 6 +- .../implementation/VLogReasonerRdfInput.java | 9 ++- .../VLogReasonerSparqlInput.java | 26 +++++---- .../implementation/VLogReasonerStateTest.java | 33 +++++------ .../VLogToModelConverterTest.java | 10 ++-- 11 files changed, 108 insertions(+), 107 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index c5de17132..8e366147d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -198,8 +198,7 @@ public void deleteListener(KnowledgeBaseListener listener) { /** * Adds a single statement to the knowledge base. 
* - * @param statement - * the statement to be added + * @param statement the statement to be added * @return true, if the knowledge base has changed. */ public void addStatement(Statement statement) { @@ -226,8 +225,7 @@ boolean doAddStatement(Statement statement) { /** * Adds a collection of statements to the knowledge base. * - * @param statements - * the statements to be added + * @param statements the statements to be added */ public void addStatements(Collection statements) { final List addedStatements = new ArrayList<>(); @@ -244,8 +242,7 @@ public void addStatements(Collection statements) { /** * Adds a list of statements to the knowledge base. * - * @param statements - * the statements to be added + * @param statements the statements to be added */ public void addStatements(Statement... statements) { final List addedStatements = new ArrayList<>(); @@ -263,8 +260,7 @@ public void addStatements(Statement... statements) { * Removes a single statement from the knowledge base. * * @return true, if the knowledge base has changed. - * @param statement - * the statement to remove + * @param statement the statement to remove */ public void removeStatement(Statement statement) { if (doRemoveStatement(statement)) { @@ -275,8 +271,7 @@ public void removeStatement(Statement statement) { /** * Removes a single statement from the knowledge base. * - * @param statement - * the statement to remove + * @param statement the statement to remove * @return true, if the knowledge base has changed. */ boolean doRemoveStatement(Statement statement) { @@ -292,8 +287,7 @@ boolean doRemoveStatement(Statement statement) { /** * Removes a collection of statements to the knowledge base. 
* - * @param statements - * the statements to remove + * @param statements the statements to remove */ public void removeStatements(Collection statements) { final List removedStatements = new ArrayList<>(); @@ -310,8 +304,7 @@ public void removeStatements(Collection statements) { /** * Removes a list of statements from the knowledge base. * - * @param statements - * the statements to remove + * @param statements the statements to remove */ public void removeStatements(Statement... statements) { final List removedStatements = new ArrayList<>(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java index f4dbc47a0..ddbd4fa7a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java @@ -48,23 +48,22 @@ public interface KnowledgeBaseListener { * knowledge base. */ void onStatementsAdded(List statementsAdded); - - + /** - * Event triggered whenever a new statement is removed from the associated knowledge - * base. + * Event triggered whenever a new statement is removed from the associated + * knowledge base. * * @param statementRemoved statement removed from the knowledge base. */ void onStatementRemoved(Statement statementRemoved); - + /** - * Event triggered whenever new statements are removed from the associated knowledge - * base. + * Event triggered whenever new statements are removed from the associated + * knowledge base. * - * @param statementsRemoved a list of new statements that have been removed from the - * knowledge base. + * @param statementsRemoved a list of new statements that have been removed from + * the knowledge base. 
*/ void onStatementsRemoved(List statementsRemoved); - + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index 7a0d5052b..61a274f23 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -71,7 +71,7 @@ public void testDoRemoveStatementInexistent() { assertFalse(removed); assertEquals(Arrays.asList(this.fact1, this.fact2, this.fact3), this.kb.getFacts()); assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(fact.getPredicate())); - + assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact1.getPredicate())); assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact2.getPredicate())); assertEquals(Sets.newSet(this.fact3), this.kb.getFactsByPredicate().get(this.fact3.getPredicate())); @@ -79,14 +79,14 @@ public void testDoRemoveStatementInexistent() { @Test public void testDoRemoveStatementInexistentPredicate() { - + final Fact fact = Expressions.makeFact("R", Expressions.makeAbstractConstant("e")); final boolean removed = this.kb.doRemoveStatement(fact); assertFalse(removed); assertEquals(Arrays.asList(this.fact1, this.fact2, this.fact3), this.kb.getFacts()); assertEquals(null, this.kb.getFactsByPredicate().get(fact.getPredicate())); - + assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact1.getPredicate())); assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact2.getPredicate())); assertEquals(Sets.newSet(this.fact3), this.kb.getFactsByPredicate().get(this.fact3.getPredicate())); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java index a1534d15d..db5c356ae 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java @@ -49,11 +49,13 @@ public class AddDataSourceTest { private static final String CSV_FILE_c_d_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"; - private final Set> csvFile_c1_c2_Content = new HashSet<>(Arrays - .asList(Arrays.asList(Expressions.makeAbstractConstant("c1")), Arrays.asList(Expressions.makeAbstractConstant("c2")))); + private final Set> csvFile_c1_c2_Content = new HashSet<>( + Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c1")), + Arrays.asList(Expressions.makeAbstractConstant("c2")))); private final Set> csvFile_c_d_Content = new HashSet<>( - Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c")), Arrays.asList(Expressions.makeAbstractConstant("d"))));; + Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c")), + Arrays.asList(Expressions.makeAbstractConstant("d"))));; @Test public void testAddDataSourceExistentDataForDifferentPredicates() throws IOException { @@ -99,13 +101,13 @@ public void testAddDataSourceBeforeLoading() throws IOException { kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); reasoner.load(); - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); 
assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -122,22 +124,22 @@ public void testAddDataSourceAfterLoading() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); - + reasoner.load(); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } - + // there is no fact for predicate Q loaded in the reasoner - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertFalse(queryResult.hasNext()); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -153,21 +155,21 @@ public void testAddDataSourceAfterReasoning() 
throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); - + reasoner.reason(); kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } // there is no fact for predicate Q loaded in the reasoner - try (final QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertFalse(queryResult.hasNext()); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -188,8 +190,8 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOExcept try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { System.out.println(QueryResultsUtils.collectQueryResults(queryResult)); } } @@ -210,8 +212,8 @@ public void testAddDataSourceNoFactsForPredicate() throws IOException { try (final VLogReasoner reasoner = new 
VLogReasoner(kb)) { reasoner.load(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { QueryResultsUtils.collectQueryResults(queryResult); } } @@ -230,8 +232,8 @@ public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOExce try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); expectedAnswers.addAll(csvFile_c_d_Content); @@ -255,8 +257,8 @@ public void testAddDataSourceAndFactsForPredicateAfterReasoning() throws IOExcep try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - try (QueryResultIterator queryResult = reasoner - .answerQuery(Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { + try (QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); expectedAnswers.add(Arrays.asList(Expressions.makeAbstractConstant("a"))); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java index a070f9416..3d8eea89b 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java @@ -490,7 +490,7 @@ public void testStatementsArrayRemovalBeforeLoad() { } } } - + @Test public void testStatementRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -503,7 +503,7 @@ public void testStatementRemovalAfterLoad() throws IOException { } } } - + @Test public void testStatementsListRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -516,7 +516,7 @@ public void testStatementsListRemovalAfterLoad() throws IOException { } } } - + @Test public void testStatementsArrayRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -555,7 +555,7 @@ public void testStatementsListRemovalBeforeReason() throws IOException { } } } - + @Test public void testStatementsArrayRemovalBeforeReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -594,7 +594,7 @@ public void testStatementsListRemovalAfterReason() throws IOException { } } } - + @Test public void testStatementsArrayRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -621,7 +621,7 @@ public void testStatementNotRemovedAfterReason() throws IOException { } } } - + @Test public void testStatementsListNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -635,7 +635,7 @@ public void testStatementsListNotRemovedAfterReason() throws IOException { } } } - + @Test public void testStatementsArrayNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -649,7 +649,7 @@ public void testStatementsArrayNotRemovedAfterReason() throws IOException { } } } - + @Test public void testRemoveAndAddStatements() throws IOException { final 
KnowledgeBase kb = new KnowledgeBase(); @@ -663,11 +663,11 @@ public void testRemoveAndAddStatements() throws IOException { } } } - + @Test public void testRemoveAndAddSameStatementOnlyFacts() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc,factPd); + kb.addStatements(factPc, factPd); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); kb.removeStatements(factPc); @@ -677,11 +677,11 @@ public void testRemoveAndAddSameStatementOnlyFacts() throws IOException { } } } - + @Test public void testRemoveAndAddStatementsOnlyFacts() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc,factPd); + kb.addStatements(factPc, factPd); try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); kb.removeStatements(factPc, factPd); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 528dfc7bf..084c1b321 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -56,7 +56,8 @@ public class VLogReasonerCombinedInputs { final Fact factQc2 = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("c2"))); final Fact factQd = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("d"))); final Fact factPd = Expressions.makeFact("p", Arrays.asList(Expressions.makeAbstractConstant("d"))); - final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q, Arrays.asList(Expressions.makeUniversalVariable("x"))); + final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q, + Arrays.asList(Expressions.makeUniversalVariable("x"))); final Set> resultsCC1C2D = new 
HashSet<>( Arrays.asList(Collections.singletonList(Expressions.makeAbstractConstant("c")), diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java index f8a55c8b3..2b7d85c58 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -53,12 +53,12 @@ public class VLogReasonerCsvInput { private final Constant c2 = Expressions.makeAbstractConstant("c2"); @SuppressWarnings("unchecked") - private final Set> expectedUnaryQueryResult = Sets.newSet(Arrays.asList(this.c1), Arrays.asList(this.c2)); + private final Set> expectedUnaryQueryResult = Sets.newSet(Arrays.asList(this.c1), + Arrays.asList(this.c2)); @Test public void testLoadEmptyCsvFile() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, - this.x); + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, this.x); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv"))); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java index b00fd9d28..d278116a3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -44,13 +44,16 @@ public class VLogReasonerRdfInput { private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); private static final 
PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, - Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), Expressions.makeUniversalVariable("o")); + Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), + Expressions.makeUniversalVariable("o")); @SuppressWarnings("unchecked") private static final Set> expectedTernaryQueryResult = Sets.newSet( - Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), Expressions.makeAbstractConstant("http://example.org/p"), + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/p"), Expressions.makeAbstractConstant("http://example.org/c2")), - Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), Expressions.makeAbstractConstant("http://example.org/q"), + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/q"), Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Ignore diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java index ad16cb1ea..521812028 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -65,8 +65,9 @@ public void testSimpleSparqlQuery() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - fatherOfPredicate, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final 
QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -90,8 +91,9 @@ public void testSimpleSparqlQueryHttps() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - fatherOfPredicate, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -124,8 +126,9 @@ public void testSimpleSparqlQuery2() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - fatherOfPredicate, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); } @@ -147,8 +150,8 @@ public void testConjunctiveQueryNewLineCharacterInQueryBody() throws IOException try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, Expressions.makeUniversalVariable("x"), - Expressions.makeUniversalVariable("y")), false); + 
reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false); } } @@ -167,8 +170,11 @@ public void testConjunctiveQuery() throws IOException { try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makePositiveLiteral( - haveChildrenTogether, Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery( + Expressions.makePositiveLiteral(haveChildrenTogether, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), + false)) { assertTrue(answerQuery.hasNext()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java index 4cbea07ad..6fb85e3c8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java @@ -79,7 +79,7 @@ public void testFailExportQueryAnswersBeforeLoad() throws IOException { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); } } - + @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryAfterReset() throws IOException { try (final Reasoner reasoner = Reasoner.getInstance()) { @@ -318,7 +318,7 @@ public void testCloseRepeatedly() throws IOException { reasoner.close(); } } - + @Test public void testStatementRemovalBeforeLoad() { final KnowledgeBase kb = new KnowledgeBase(); @@ -328,7 +328,7 @@ public void testStatementRemovalBeforeLoad() { assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); } } - + @Test public void 
testStatementsListRemovalBeforeLoad() { final KnowledgeBase kb = new KnowledgeBase(); @@ -338,7 +338,7 @@ public void testStatementsListRemovalBeforeLoad() { assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalBeforeLoad() { final KnowledgeBase kb = new KnowledgeBase(); @@ -348,7 +348,7 @@ public void testStatementsArrayRemovalBeforeLoad() { assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); } } - + @Test public void testStatementRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -359,7 +359,7 @@ public void testStatementRemovalAfterLoad() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -370,7 +370,7 @@ public void testStatementsListRemovalAfterLoad() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalAfterLoad() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -381,7 +381,7 @@ public void testStatementsArrayRemovalAfterLoad() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementRemovalBeforeReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -392,7 +392,7 @@ public void testStatementRemovalBeforeReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListRemovalBeforeReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -403,7 +403,7 @@ public void testStatementsListRemovalBeforeReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalBeforeReason() 
throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -414,7 +414,7 @@ public void testStatementsArrayRemovalBeforeReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -425,7 +425,7 @@ public void testStatementRemovalAfterReason() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -436,7 +436,7 @@ public void testStatementsListRemovalAfterReason() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayRemovalAfterReason() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); @@ -447,7 +447,7 @@ public void testStatementsArrayRemovalAfterReason() throws IOException { assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); } } - + @Test public void testStatementNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -459,7 +459,7 @@ public void testStatementNotRemovedAfterReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsListNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -471,7 +471,7 @@ public void testStatementsListNotRemovedAfterReason() throws IOException { assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); } } - + @Test public void testStatementsArrayListNotRemovedAfterReason() throws IOException { final Fact newFact = Expressions.makeFact("newPred", c); @@ -484,5 +484,4 @@ public void testStatementsArrayListNotRemovedAfterReason() throws IOException { } } - } diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java index d9246a1e3..0a80eb198 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java @@ -65,20 +65,18 @@ public void testLanguageStringConversion() { Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } - + @Test public void testNamedNullConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, - "_123"); + karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); Term vLog4jTerm = new NamedNullImpl("_123"); Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } - + @Test(expected = IllegalArgumentException.class) public void testVariableConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "X"); + karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); VLogToModelConverter.toTerm(vLogTerm); } From 3dbb6918616e18f49c176bb69c0620320859ccff Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 7 Nov 2019 15:02:32 +0100 Subject: [PATCH 0595/1255] added OwlFeatureNotSupportedException for SWRL rules --- .../org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index ac158bae0..e50f3c12b 100644 --- 
a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -520,7 +520,7 @@ public void visit(final OWLHasKeyAxiom axiom) { @Override public void visit(final SWRLRule rule) { - // TODO support SWRL rules + throw new OwlFeatureNotSupportedException("SWRLRule currently not supported."); } From 12e2cb8db2cfc150d86f4f1983c0a8a01991b0d9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 7 Nov 2019 15:37:14 +0100 Subject: [PATCH 0596/1255] OWLObjectOneOf in body converter --- .../vlog4j/owlapi/ClassToRuleBodyConverter.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java index e6b19f79e..28265f47a 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java @@ -153,13 +153,7 @@ public void visit(final OWLObjectHasSelf ce) { @Override public void visit(final OWLObjectOneOf ce) { - // ce.individuals().forEach(individual -> { - // final Term individualTerm = - // OwlToRulesConversionHelper.getIndividualTerm(individual); - // replaceTerm(this.body, individualTerm); - // replaceTerm(this.head, individualTerm); - // }); - // TODO Auto-generated method parenstub + throw new RuntimeException("This should never occur: BodyConverter for "+ce); } From 0a3be6c275345642ff4954e1cb301cc655618fd9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 7 Nov 2019 15:37:30 +0100 Subject: [PATCH 0597/1255] OWLObjectOneOf in body converter --- .../owlapi/OwlAxiomToRulesConverter.java | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git 
a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index e50f3c12b..13dab444a 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -221,24 +221,24 @@ void startAxiomConversion() { * @param superClass */ void addSubClassAxiom(final OWLClassExpression subClass, final OWLClassExpression superClass) { - this.startAxiomConversion(); - - final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(this.frontierVariable, this); - superClass.accept(headConverter); - final ClassToRuleBodyConverter bodyConverter = new ClassToRuleBodyConverter(this.frontierVariable, - headConverter.body, headConverter.head, this); - bodyConverter.handleDisjunction(subClass, this.frontierVariable); - this.addRule(bodyConverter); + if (subClass instanceof OWLObjectOneOf) { + final OWLObjectOneOf subClassInstaceOf = (OWLObjectOneOf) subClass; + subClassInstaceOf.individuals().forEach(individual -> visitClassAssertionAxiom(individual, superClass)); + } else { + this.startAxiomConversion(); + + final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(this.frontierVariable, this); + superClass.accept(headConverter); + final ClassToRuleBodyConverter bodyConverter = new ClassToRuleBodyConverter(this.frontierVariable, + headConverter.body, headConverter.head, this); + bodyConverter.handleDisjunction(subClass, this.frontierVariable); + this.addRule(bodyConverter); + } } @Override public void visit(final OWLSubClassOfAxiom axiom) { - if (axiom.getSubClass() instanceof OWLObjectOneOf) { - final OWLObjectOneOf subClass = (OWLObjectOneOf) axiom.getSubClass(); - subClass.individuals().forEach(individual -> visitClassAssertionAxiom(individual, axiom.getSuperClass())); - } else { - 
this.addSubClassAxiom(axiom.getSubClass(), axiom.getSuperClass()); - } + this.addSubClassAxiom(axiom.getSubClass(), axiom.getSuperClass()); } @Override From bd9058bc6beb0bcfcd5fe90e02b8d7b878c226d9 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 7 Nov 2019 16:14:49 +0100 Subject: [PATCH 0598/1255] fixed some styles --- .../model/implementation/ConjunctionImpl.java | 2 +- .../DataSourceDeclarationImpl.java | 5 +- .../core/model/implementation/Serializer.java | 46 +++++++++++++++---- .../core/model/ConjunctionImplTest.java | 2 +- .../core/model/DataSourceDeclarationTest.java | 2 +- .../vlog4j/core/model/FactTest.java | 2 +- .../vlog4j/core/model/TermImplTest.java | 12 ++--- 7 files changed, 48 insertions(+), 23 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 69df24198..735fea4e9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -95,7 +95,7 @@ public String toString() { } else { stringBuilder.append(", "); } - stringBuilder.append(Serializer.getLiteralString((AbstractLiteralImpl) literal)); + stringBuilder.append(Serializer.getLiteralString(literal)); } return stringBuilder.toString(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index 217538610..a40eae7b7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -77,7 +77,7 @@ public boolean equals(final Object obj) { return 
(this.predicate.equals(other.getPredicate())) && this.dataSource.equals(other.getDataSource()); } - + @Override public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); @@ -85,8 +85,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return "@source " + Serializer.getPredicateString(this.predicate) + "(" + this.predicate.getArity() + ") : " - + this.dataSource.toConfigString() + " ."; + return Serializer.getDataSourceDeclarationString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 01496eb18..cd5bab3aa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -24,16 +24,22 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; +import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; import org.semanticweb.vlog4j.core.model.api.Variable; /** - * Simple class implementation of various toString methods to ensure the correct - * parsable string output of the different Data models. 
+ * A utility class with static methods to obtain the correct parsable string + * representation of the different data models. * * @author Ali Elhalawati * @@ -44,11 +50,11 @@ private Serializer() { } - public static String getRuleString(RuleImpl rule) { + public static String getRuleString(Rule rule) { return rule.getHead() + " :- " + rule.getBody() + "."; } - public static String getLiteralString(AbstractLiteralImpl literal) { + public static String getLiteralString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { stringBuilder.append("~"); @@ -67,27 +73,27 @@ public static String getLiteralString(AbstractLiteralImpl literal) { return stringBuilder.toString(); } - public static String getConstantString(AbstractConstantImpl constant) { + public static String getConstantString(Constant constant) { return constant.getName(); } - public static String getExistentialVarString(ExistentialVariableImpl existentialvariable) { + public static String getExistentialVarString(ExistentialVariable existentialvariable) { return "!" + existentialvariable.getName(); } - public static String getUniversalVarString(UniversalVariableImpl universalvariable) { + public static String getUniversalVarString(UniversalVariable universalvariable) { return "?" 
+ universalvariable.getName(); } - public static String getDatatypeConstantString(DatatypeConstantImpl datatypeconstant) { + public static String getDatatypeConstantString(DatatypeConstant datatypeconstant) { return datatypeconstant.getName(); } - public static String getNamedNullString(NamedNullImpl namednull) { + public static String getNamedNullString(NamedNull namednull) { return "_" + namednull.getName(); } - public static String getLanguageConstantString(LanguageStringConstantImpl languagestringconstant) { + public static String getLanguageConstantString(LanguageStringConstant languagestringconstant) { return languagestringconstant.getName(); } @@ -95,4 +101,24 @@ public static String getPredicateString(Predicate predicate) { return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } + public static String getDataSourceDeclarationString(DataSourceDeclaration datasourcedeclaration) { + return "@source " + getPredicateString(datasourcedeclaration.getPredicate()) + "(" + + datasourcedeclaration.getPredicate().getArity() + ") : " + + datasourcedeclaration.getDataSource().toConfigString() + " ."; + } + + public static String getConjunctionString(Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(", "); + } + stringBuilder.append(getLiteralString(literal)); + } + return stringBuilder.toString(); + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index 4df0c8f68..c3c632c77 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -182,7 +182,7 @@ public void 
negativeLiteralsNoNullElements() { } @Test - public void testconjunctiontoString() { + public void conjunctiontToStringTest() { final Variable x = Expressions.makeUniversalVariable("X"); final Variable y = Expressions.makeUniversalVariable("Y"); final Constant c = Expressions.makeAbstractConstant("c"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 4e35fcf32..3091216d5 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -63,7 +63,7 @@ public void equalityTest() throws MalformedURLException { } @Test - public void testdataSourceDecalarationtoString() throws MalformedURLException { + public void dataSourceDecalarationToStringTest() throws MalformedURLException { DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", "?var wdt:P31 wd:Q5 ."); Predicate predicate1 = Expressions.makePredicate("p", 3); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index d2da2f1de..7f39dd875 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -55,7 +55,7 @@ public void factsOnlyContainConstants() { } @Test - public void testtoString() { + public void factToStringTest() { final Predicate p = Expressions.makePredicate("p", 2); final Constant c = Expressions.makeAbstractConstant("c"); final Constant d = Expressions.makeAbstractConstant("d"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 
1c9e98f1f..2a218a6a2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -133,37 +133,37 @@ public void namedNullGetterTest() { } @Test - public void testabstractConstantImpltoString() { + public void abstractConstantToStringTest() { AbstractConstantImpl c = new AbstractConstantImpl("c"); assertEquals("c", c.toString()); } @Test - public void testdatatypeConstantImpltoString() { + public void datatypeConstantToStringTest() { DatatypeConstantImpl c = new DatatypeConstantImpl("c", "http://example.org/mystring"); assertEquals("\"c\"^^", c.toString()); } @Test - public void testlanguageStringConstantImpltoString() { + public void languageStringConstantToStringTest() { LanguageStringConstantImpl c = new LanguageStringConstantImpl("Test", "en"); assertEquals("\"Test\"@en", c.toString()); } @Test - public void testuniversalVariabletoString() { + public void universalVariableToStringTest() { UniversalVariableImpl v = new UniversalVariableImpl("v"); assertEquals("?v", v.toString()); } @Test - public void testexistentialVariabletoString() { + public void existentialVariableToStringTest() { ExistentialVariableImpl v = new ExistentialVariableImpl("v"); assertEquals("!v", v.toString()); } @Test - public void testnamedNulltoString() { + public void namedNullToStringTest() { NamedNullImpl n = new NamedNullImpl("123"); assertEquals("_123", n.toString()); } From 8825fa8a85d51a2adff94dc5f5359fbbc2cc8a02 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 7 Nov 2019 16:47:41 +0100 Subject: [PATCH 0599/1255] changed some styles --- .../vlog4j/core/model/implementation/FactImpl.java | 5 ++--- .../vlog4j/core/model/implementation/Serializer.java | 2 +- .../java/org/semanticweb/vlog4j/core/model/RuleImplTest.java | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index b08939577..63a931e99 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -47,11 +47,10 @@ public FactImpl(Predicate predicate, List terms) { public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } - + @Override public String toString() { - return Serializer.getLiteralString(this)+"."; + return Serializer.getLiteralString(this) + "."; } - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index cd5bab3aa..3cffd0168 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -106,7 +106,7 @@ public static String getDataSourceDeclarationString(DataSourceDeclaration dataso + datasourcedeclaration.getPredicate().getArity() + ") : " + datasourcedeclaration.getDataSource().toConfigString() + " ."; } - + public static String getConjunctionString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index 7c05f8b61..ef5dce07b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -139,7 +139,7 @@ public void noUnsafeVariables() { } @Test - public void testtoString() { + public void 
ruleToStringTest() { final Variable x = Expressions.makeUniversalVariable("X"); final Variable y = Expressions.makeExistentialVariable("Y"); final Variable z = Expressions.makeUniversalVariable("Z"); From be2656160aa4a01e18f82430c3a726e619775ea4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 8 Nov 2019 14:11:20 +0100 Subject: [PATCH 0600/1255] draft support nominals in complex class expressions --- .../owlapi/ClassToRuleBodyConverter.java | 27 +++- .../vlog4j/owlapi/RulesHelper.java | 42 +++++++ .../owlapi/OwlAxiomToRulesConverterTest.java | 115 ++++++++++++++---- .../vlog4j/owlapi/TestRulesHelper.java | 25 ++++ 4 files changed, 181 insertions(+), 28 deletions(-) create mode 100644 vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java create mode 100644 vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java index 28265f47a..01ea37ff9 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.owlapi; +import java.util.ArrayList; + /*- * #%L * VLog4j OWL API Support @@ -21,6 +23,8 @@ */ import java.util.Arrays; +import java.util.List; +import java.util.function.UnaryOperator; import java.util.stream.Collectors; import org.semanticweb.owlapi.model.OWLClass; @@ -42,6 +46,7 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import 
org.semanticweb.vlog4j.core.model.api.Variable; @@ -153,8 +158,26 @@ public void visit(final OWLObjectHasSelf ce) { @Override public void visit(final OWLObjectOneOf ce) { - throw new RuntimeException("This should never occur: BodyConverter for "+ce); - + System.out.println("Body"); + System.out.println(this.body.getConjuncts()); + System.out.println("Head"); + System.out.println(this.head.getConjuncts()); + + System.out.println("Parent body: " + parent.rules); + + ce.individuals().forEach(individual -> { + final Term individualTerm = OwlToRulesConversionHelper.getIndividualTerm(individual); + + if (this.body.exists()) { + SimpleConjunction newBody = RulesHelper.replaceTerm(this.body, this.mainTerm, individualTerm); + System.out.println("New Body: " + newBody); + } + if (this.head.exists()) { + SimpleConjunction newHead = RulesHelper.replaceTerm(this.head, this.mainTerm, individualTerm); + System.out.println("New Head: " + newHead); + } + + }); } @Override diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java new file mode 100644 index 000000000..74e9c1822 --- /dev/null +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java @@ -0,0 +1,42 @@ +package org.semanticweb.vlog4j.owlapi; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.UnaryOperator; + +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; + +public final class RulesHelper { + + private RulesHelper() {} + + static SimpleConjunction replaceTerm(SimpleConjunction conjunction, Term mainTerm, Term individualTerm) { + SimpleConjunction newSimpleConjunction = new SimpleConjunction(); + 
conjunction.getConjuncts().forEach(conjunct -> { + PositiveLiteral newLiteral = replaceTerm(conjunct, mainTerm, individualTerm); + newSimpleConjunction.add(newLiteral); + }); + return newSimpleConjunction; + } + + static PositiveLiteral replaceTerm(PositiveLiteral positiveLiteral, Term sourceTerm, Term targetTerm) { + + List arguments = positiveLiteral.getArguments(); + List modifiableArguments = replaceTerm(sourceTerm, targetTerm, arguments); + + return new PositiveLiteralImpl(positiveLiteral.getPredicate(), modifiableArguments); + } + + static List replaceTerm(Term sourceTerm, Term targetTerm, List terms) { + List newTerms = new ArrayList<>(terms); + + UnaryOperator replaceSourceTerm = term -> term.equals(sourceTerm) ? targetTerm : term; + newTerms.replaceAll(replaceSourceTerm); + + return newTerms; + } + +} diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java index 6e10442ac..ac5184f91 100644 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java @@ -36,11 +36,13 @@ import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; 
@@ -72,29 +74,33 @@ public static Predicate getPropertyPredicate(final String localName) { return Expressions.makePredicate("http://example.org/" + localName, 2); } - static OWLClass cA = getOwlClass("A"); - static OWLClass cB = getOwlClass("B"); - static OWLClass cC = getOwlClass("C"); - static OWLClass cD = getOwlClass("D"); - static OWLClass cE = getOwlClass("E"); - static OWLObjectProperty pR = getOwlObjectProperty("Rule"); - static OWLObjectProperty pS = getOwlObjectProperty("S"); - static OWLObjectProperty pT = getOwlObjectProperty("T"); - static OWLObjectProperty pU = getOwlObjectProperty("U"); - - static Predicate nA = getClassPredicate("A"); - static Predicate nB = getClassPredicate("B"); - static Predicate nC = getClassPredicate("C"); - static Predicate nD = getClassPredicate("D"); - static Predicate nE = getClassPredicate("E"); - static Predicate nR = getPropertyPredicate("Rule"); - static Predicate nS = getPropertyPredicate("S"); - static Predicate nT = getPropertyPredicate("T"); - static Predicate nU = getPropertyPredicate("U"); - - static OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); - static OWLIndividual indb = df.getOWLNamedIndividual(getIri("b")); - static OWLIndividual indc = df.getOWLNamedIndividual(getIri("c")); + static final OWLClass cA = getOwlClass("A"); + static final OWLClass cB = getOwlClass("B"); + static final OWLClass cC = getOwlClass("C"); + static final OWLClass cD = getOwlClass("D"); + static final OWLClass cE = getOwlClass("E"); + static final OWLObjectProperty pR = getOwlObjectProperty("Rule"); + static final OWLObjectProperty pS = getOwlObjectProperty("S"); + static final OWLObjectProperty pT = getOwlObjectProperty("T"); + static final OWLObjectProperty pU = getOwlObjectProperty("U"); + + static final Predicate nA = getClassPredicate("A"); + static final Predicate nB = getClassPredicate("B"); + static final Predicate nC = getClassPredicate("C"); + static final Predicate nD = getClassPredicate("D"); + static final 
Predicate nE = getClassPredicate("E"); + static final Predicate nR = getPropertyPredicate("Rule"); + static final Predicate nS = getPropertyPredicate("S"); + static final Predicate nT = getPropertyPredicate("T"); + static final Predicate nU = getPropertyPredicate("U"); + + static final OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); + static final OWLIndividual indb = df.getOWLNamedIndividual(getIri("b")); + static final OWLIndividual indc = df.getOWLNamedIndividual(getIri("c")); + + static final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); + static final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); + static final Term constc = Expressions.makeAbstractConstant(getIri("c").toString()); @Test public void testSimpleRule() { @@ -387,9 +393,7 @@ public void testClassAssertions() { Ca.accept(converter); BandhasRba.accept(converter); - final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); - final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); - final Term constc = Expressions.makeAbstractConstant(getIri("c").toString()); + final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, constc); final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, consta); final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, consta, constb); @@ -632,6 +636,65 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } + @Test + public void testNominalSubClassOfClass() { + OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(oneOfa, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Fact expectedFact = Expressions.makeFact(nA, consta); + assertEquals(Collections.singleton(expectedFact), converter.facts); + assertTrue(converter.rules.isEmpty()); + } + + @Test + public void 
testNominalsSubClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(oneOfab, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Fact expectedFact1 = Expressions.makeFact(nA, consta); + final Fact expectedFact2 = Expressions.makeFact(nA, constb); + + assertEquals(Sets.newSet(expectedFact1,expectedFact2), converter.facts); + assertTrue(converter.rules.isEmpty()); + } + + @Test + public void testNominalsInConjunctionSubClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(oneOfab,cB); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); +//TODO + System.out.println(converter.rules); + + } + + @Test(expected = OwlFeatureNotSupportedException.class) + public void testNominalSuperClassOfClass() { + OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, oneOfa); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + @Test(expected = OwlFeatureNotSupportedException.class) + public void testNominalsSuperClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA,oneOfab); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + @Ignore public void test() { final OWLObjectPropertyExpression Sinv = df.getOWLObjectInverseOf(pS); diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java new file mode 100644 index 000000000..f7cee4055 --- /dev/null +++ 
b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java @@ -0,0 +1,25 @@ +package org.semanticweb.vlog4j.owlapi; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + +public class TestRulesHelper { + + @Test + public void testReplaceTerm() { + AbstractConstant c1 = Expressions.makeAbstractConstant("c1"); + UniversalVariable v1 = Expressions.makeUniversalVariable("v1"); + UniversalVariable v2 = Expressions.makeUniversalVariable("v2"); + + PositiveLiteral positiveLiteral = Expressions.makePositiveLiteral("a", v1, v1, v2, c1); + + PositiveLiteral expectedLiteral = Expressions.makePositiveLiteral("a", c1, c1, v2, c1); + assertEquals(expectedLiteral, RulesHelper.replaceTerm(positiveLiteral, v1, c1)); + } + +} From 40ac51a18d53f69f13a4bfcc2f72e9a9f64be1fd Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 8 Nov 2019 21:21:20 +0100 Subject: [PATCH 0601/1255] fixed styles and duplicates --- .../model/implementation/ConjunctionImpl.java | 12 +------- .../implementation/DatatypeConstantImpl.java | 2 +- .../LanguageStringConstantImpl.java | 2 +- .../model/implementation/PredicateImpl.java | 8 ++--- .../core/model/implementation/Serializer.java | 30 +++++++------------ .../core/model/ConjunctionImplTest.java | 2 +- 6 files changed, 18 insertions(+), 38 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 735fea4e9..c28374ad7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -87,17 +87,7 @@ public Iterator iterator() { @Override public String toString() { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final T literal : this.literals) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } - stringBuilder.append(Serializer.getLiteralString(literal)); - } - return stringBuilder.toString(); + return Serializer.getConjunctionString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index 907758747..0f7d85135 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -60,7 +60,7 @@ public String getLexicalValue() { @Override public String toString() { - return Serializer.getDatatypeConstantString(this); + return Serializer.getConstantString(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index 084143187..7e8657216 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -84,7 +84,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return Serializer.getLanguageConstantString(this); + return Serializer.getConstantString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java index 3ad38a877..27d632c6d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java @@ -40,10 +40,8 @@ public class PredicateImpl implements Predicate { /** * Constructor for {@link Predicate}s of arity 1 or higher. * - * @param name - * a non-blank String (not null, nor empty or whitespace). - * @param arity - * an int value strictly greater than 0. + * @param name a non-blank String (not null, nor empty or whitespace). + * @param arity an int value strictly greater than 0. */ public PredicateImpl(@NonNull String name, int arity) { Validate.notBlank(name, "Predicates cannot be named by blank Strings."); @@ -89,7 +87,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return "PredicateImpl [name=" + this.name + ", arity=" + this.arity + "]"; + return Serializer.getPredicateString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 3cffd0168..839db2ff9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -77,37 +77,29 @@ public static String getConstantString(Constant constant) { return constant.getName(); } - public static String getExistentialVarString(ExistentialVariable existentialvariable) { - return "!" + existentialvariable.getName(); + public static String getExistentialVarString(ExistentialVariable existentialVariable) { + return "!" + existentialVariable.getName(); } - public static String getUniversalVarString(UniversalVariable universalvariable) { - return "?" 
+ universalvariable.getName(); + public static String getUniversalVarString(UniversalVariable universalVariable) { + return "?" + universalVariable.getName(); } - public static String getDatatypeConstantString(DatatypeConstant datatypeconstant) { - return datatypeconstant.getName(); - } - - public static String getNamedNullString(NamedNull namednull) { - return "_" + namednull.getName(); - } - - public static String getLanguageConstantString(LanguageStringConstant languagestringconstant) { - return languagestringconstant.getName(); + public static String getNamedNullString(NamedNull namedNull) { + return "_" + namedNull.getName(); } public static String getPredicateString(Predicate predicate) { return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } - public static String getDataSourceDeclarationString(DataSourceDeclaration datasourcedeclaration) { - return "@source " + getPredicateString(datasourcedeclaration.getPredicate()) + "(" - + datasourcedeclaration.getPredicate().getArity() + ") : " - + datasourcedeclaration.getDataSource().toConfigString() + " ."; + public static String getDataSourceDeclarationString(DataSourceDeclaration dataSourceDeclaration) { + return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + + dataSourceDeclaration.getPredicate().getArity() + ") : " + + dataSourceDeclaration.getDataSource().toConfigString() + " ."; } - public static String getConjunctionString(Conjunction conjunction) { + public static String getConjunctionString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; for (final Literal literal : conjunction.getLiterals()) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index c3c632c77..3394d45cb 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -182,7 +182,7 @@ public void negativeLiteralsNoNullElements() { } @Test - public void conjunctiontToStringTest() { + public void conjunctionToStringTest() { final Variable x = Expressions.makeUniversalVariable("X"); final Variable y = Expressions.makeUniversalVariable("Y"); final Constant c = Expressions.makeAbstractConstant("c"); From 33890bf622096205d899252ede54a9d6a16c100e Mon Sep 17 00:00:00 2001 From: alloka Date: Sun, 10 Nov 2019 20:37:08 +0100 Subject: [PATCH 0602/1255] added more tests and modified serializer conjunction --- .../core/model/implementation/Serializer.java | 2 +- .../vlog4j/core/model/ConjunctionImplTest.java | 10 ++++++---- .../vlog4j/core/model/RuleImplTest.java | 18 +++++++++++++++++- 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 839db2ff9..8ab8b7455 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -51,7 +51,7 @@ private Serializer() { } public static String getRuleString(Rule rule) { - return rule.getHead() + " :- " + rule.getBody() + "."; + return getConjunctionString(rule.getHead()) + " :- " + getConjunctionString(rule.getBody()) + "."; } public static String getLiteralString(Literal literal) { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java index 3394d45cb..db8fdcf30 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java @@ -190,10 +190,12 @@ public void conjunctionToStringTest() { final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y, x); final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); - final List positiveLiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, - positiveLiteral3); - final Conjunction conjunction1 = new ConjunctionImpl<>(positiveLiteralList); - assertEquals("p(?X, c), p(?Y, ?X), q(?X, d)", conjunction1.toString()); + final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, d); + final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, + NegativeLiteral, PositiveLiteral4); + final Conjunction conjunction1 = new ConjunctionImpl<>(LiteralList); + assertEquals("p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, d)", conjunction1.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index ef5dce07b..a2214fc92 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -23,13 +23,18 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; +import java.util.Arrays; +import java.util.List; + import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import 
org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; @@ -143,15 +148,26 @@ public void ruleToStringTest() { final Variable x = Expressions.makeUniversalVariable("X"); final Variable y = Expressions.makeExistentialVariable("Y"); final Variable z = Expressions.makeUniversalVariable("Z"); + final Variable y2 = Expressions.makeUniversalVariable("Y"); + final Constant d = Expressions.makeAbstractConstant("d"); final Constant c = Expressions.makeAbstractConstant("c"); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z); final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, d); + final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, + NegativeLiteral, PositiveLiteral4); final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); - final Conjunction bodyPositiveLiterals = Expressions.makePositiveConjunction(atom1, atom2); + final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); 
assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, d).", rule2.toString()); } From 706a3f212c8760b4d4afb2b976e9a6f8ca600e92 Mon Sep 17 00:00:00 2001 From: alloka Date: Sun, 10 Nov 2019 20:44:18 +0100 Subject: [PATCH 0603/1255] added languagestringconstant in rule test --- .../org/semanticweb/vlog4j/core/model/RuleImplTest.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index a2214fc92..461a439f1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -36,6 +36,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; public class RuleImplTest { @@ -151,6 +152,7 @@ public void ruleToStringTest() { final Variable y2 = Expressions.makeUniversalVariable("Y"); final Constant d = Expressions.makeAbstractConstant("d"); final Constant c = Expressions.makeAbstractConstant("c"); + LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z); final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y); @@ -158,7 +160,7 @@ public void ruleToStringTest() { final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", 
x, d); final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); - final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, s); final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, NegativeLiteral, PositiveLiteral4); final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); @@ -167,7 +169,7 @@ public void ruleToStringTest() { final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); - assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, d).", rule2.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).", rule2.toString()); } From d6e07b44a077b708ff8f7314119d90f02c7469c1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 11 Nov 2019 17:40:11 +0100 Subject: [PATCH 0604/1255] throw Not Supported exception for encountering nominals in complex class expressions --- .../owlapi/ClassToRuleBodyConverter.java | 28 +------- .../owlapi/OwlAxiomToRulesConverterTest.java | 65 +++++++++++++++---- 2 files changed, 56 insertions(+), 37 deletions(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java index 01ea37ff9..027ebb4af 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.owlapi; -import java.util.ArrayList; - /*- * #%L * VLog4j OWL API Support @@ -23,8 +21,6 @@ */ import java.util.Arrays; -import java.util.List; -import 
java.util.function.UnaryOperator; import java.util.stream.Collectors; import org.semanticweb.owlapi.model.OWLClass; @@ -46,7 +42,6 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; @@ -156,28 +151,11 @@ public void visit(final OWLObjectHasSelf ce) { this.body); } + // TODO support this feature @Override public void visit(final OWLObjectOneOf ce) { - System.out.println("Body"); - System.out.println(this.body.getConjuncts()); - System.out.println("Head"); - System.out.println(this.head.getConjuncts()); - - System.out.println("Parent body: " + parent.rules); - - ce.individuals().forEach(individual -> { - final Term individualTerm = OwlToRulesConversionHelper.getIndividualTerm(individual); - - if (this.body.exists()) { - SimpleConjunction newBody = RulesHelper.replaceTerm(this.body, this.mainTerm, individualTerm); - System.out.println("New Body: " + newBody); - } - if (this.head.exists()) { - SimpleConjunction newHead = RulesHelper.replaceTerm(this.head, this.mainTerm, individualTerm); - System.out.println("New Head: " + newHead); - } - - }); + throw new OwlFeatureNotSupportedException( + "OWLObjectOneOf in complex class expressions currently not supported!"); } @Override diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java index ac5184f91..5873e4585 100644 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java @@ -97,7 +97,7 @@ public static 
Predicate getPropertyPredicate(final String localName) { static final OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); static final OWLIndividual indb = df.getOWLNamedIndividual(getIri("b")); static final OWLIndividual indc = df.getOWLNamedIndividual(getIri("c")); - + static final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); static final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); static final Term constc = Expressions.makeAbstractConstant(getIri("c").toString()); @@ -393,7 +393,6 @@ public void testClassAssertions() { Ca.accept(converter); BandhasRba.accept(converter); - final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, constc); final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, consta); final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, consta, constb); @@ -636,6 +635,9 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } + /* + * {a} \sqsubseteq A + */ @Test public void testNominalSubClassOfClass() { OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); @@ -643,12 +645,15 @@ public void testNominalSubClassOfClass() { final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); - + final Fact expectedFact = Expressions.makeFact(nA, consta); assertEquals(Collections.singleton(expectedFact), converter.facts); assertTrue(converter.rules.isEmpty()); } + /* + * {a,b} \sqsubseteq A + */ @Test public void testNominalsSubClassOfClass() { OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); @@ -660,23 +665,56 @@ public void testNominalsSubClassOfClass() { final Fact expectedFact1 = Expressions.makeFact(nA, consta); final Fact expectedFact2 = Expressions.makeFact(nA, constb); - assertEquals(Sets.newSet(expectedFact1,expectedFact2), converter.facts); + assertEquals(Sets.newSet(expectedFact1, expectedFact2), converter.facts); assertTrue(converter.rules.isEmpty()); } + + /* + * 
({a,b} \sqcap B) \sqsubseteq A + */ + @Test(expected = OwlFeatureNotSupportedException.class) + // TODO support this feature + public void testNominalsInConjunctionLeftSubClassOfClass() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(oneOfab, cB); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } - @Test - public void testNominalsInConjunctionSubClassOfClass() { + /* + * (B \sqcap {a,b}) \sqsubseteq A + */ + @Test(expected = OwlFeatureNotSupportedException.class) + // TODO support this feature + public void testNominalsInConjunctionRightSubClassOfClass() { OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); - OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(oneOfab,cB); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(cB, oneOfab); OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); -//TODO - System.out.println(converter.rules); - } + + /* + * A \sqsubseteq (B \sqcap {a,b}) + */ + @Test(expected = OwlFeatureNotSupportedException.class) + public void testClassSubClassOfNominalsInConjunctionRight() { + OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(cB, oneOfab); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, conjunction); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + + /* + * A \sqsubseteq {a} + */ @Test(expected = OwlFeatureNotSupportedException.class) public void testNominalSuperClassOfClass() { OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); @@ -685,11 +723,14 @@ public void testNominalSuperClassOfClass() { final 
OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); } - + + /* + * A \sqsubseteq {a,b} + */ @Test(expected = OwlFeatureNotSupportedException.class) public void testNominalsSuperClassOfClass() { OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA,oneOfab); + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, oneOfab); final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); From f39d071fff386c9456dc44e30b0e93f7bd47a039 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 11 Nov 2019 17:44:54 +0100 Subject: [PATCH 0605/1255] remove unused helper class. --- .../vlog4j/owlapi/RulesHelper.java | 42 ------------------- .../vlog4j/owlapi/TestRulesHelper.java | 25 ----------- 2 files changed, 67 deletions(-) delete mode 100644 vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java delete mode 100644 vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java deleted file mode 100644 index 74e9c1822..000000000 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/RulesHelper.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -import java.util.ArrayList; -import java.util.List; -import java.util.function.UnaryOperator; - -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; - -public final class RulesHelper { - - private RulesHelper() {} - - static SimpleConjunction replaceTerm(SimpleConjunction conjunction, Term mainTerm, Term individualTerm) { - 
SimpleConjunction newSimpleConjunction = new SimpleConjunction(); - conjunction.getConjuncts().forEach(conjunct -> { - PositiveLiteral newLiteral = replaceTerm(conjunct, mainTerm, individualTerm); - newSimpleConjunction.add(newLiteral); - }); - return newSimpleConjunction; - } - - static PositiveLiteral replaceTerm(PositiveLiteral positiveLiteral, Term sourceTerm, Term targetTerm) { - - List arguments = positiveLiteral.getArguments(); - List modifiableArguments = replaceTerm(sourceTerm, targetTerm, arguments); - - return new PositiveLiteralImpl(positiveLiteral.getPredicate(), modifiableArguments); - } - - static List replaceTerm(Term sourceTerm, Term targetTerm, List terms) { - List newTerms = new ArrayList<>(terms); - - UnaryOperator replaceSourceTerm = term -> term.equals(sourceTerm) ? targetTerm : term; - newTerms.replaceAll(replaceSourceTerm); - - return newTerms; - } - -} diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java deleted file mode 100644 index f7cee4055..000000000 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/TestRulesHelper.java +++ /dev/null @@ -1,25 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -import static org.junit.Assert.assertEquals; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; - -public class TestRulesHelper { - - @Test - public void testReplaceTerm() { - AbstractConstant c1 = Expressions.makeAbstractConstant("c1"); - UniversalVariable v1 = Expressions.makeUniversalVariable("v1"); - UniversalVariable v2 = Expressions.makeUniversalVariable("v2"); - - PositiveLiteral positiveLiteral = Expressions.makePositiveLiteral("a", v1, v1, v2, c1); - - PositiveLiteral 
expectedLiteral = Expressions.makePositiveLiteral("a", c1, c1, v2, c1); - assertEquals(expectedLiteral, RulesHelper.replaceTerm(positiveLiteral, v1, c1)); - } - -} From 0e9868bc8091ad9699ae7ee4b524643fd7c42cec Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 11 Nov 2019 17:47:07 +0100 Subject: [PATCH 0606/1255] added getSyntacticRepresentation in every data model and moved any string manipulation in models to serializer --- .../picocli/VLog4jClientMaterialize.java | 1 - .../core/model/api/DataSourceDeclaration.java | 2 +- .../vlog4j/core/model/api/Entity.java | 32 ++++++++++++ .../vlog4j/core/model/api/Literal.java | 2 +- .../vlog4j/core/model/api/Predicate.java | 2 +- .../vlog4j/core/model/api/Rule.java | 2 +- .../vlog4j/core/model/api/Term.java | 2 +- .../implementation/AbstractConstantImpl.java | 14 ++++-- .../implementation/AbstractLiteralImpl.java | 6 ++- .../model/implementation/ConjunctionImpl.java | 6 ++- .../DataSourceDeclarationImpl.java | 6 ++- .../implementation/DatatypeConstantImpl.java | 8 ++- .../ExistentialVariableImpl.java | 6 ++- .../core/model/implementation/FactImpl.java | 2 +- .../LanguageStringConstantImpl.java | 8 ++- .../model/implementation/NamedNullImpl.java | 6 ++- .../model/implementation/PredicateImpl.java | 6 ++- .../core/model/implementation/RuleImpl.java | 7 ++- .../core/model/implementation/Serializer.java | 50 ++++++++++++++----- .../implementation/UniversalVariableImpl.java | 6 ++- .../reasoner/implementation/VLogReasoner.java | 23 ++++++--- 21 files changed, 154 insertions(+), 43 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index 7758a0065..969d7d0f7 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ 
b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -37,7 +37,6 @@ import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; - import picocli.CommandLine.ArgGroup; import picocli.CommandLine.Command; import picocli.CommandLine.Option; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java index 58bea5785..dbd97c360 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement { +public interface DataSourceDeclaration extends Statement, SyntacticRepresentation{ /** * Returns the {@link Predicate} that this source applies to. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java new file mode 100644 index 000000000..a68806024 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -0,0 +1,32 @@ +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + +public interface SyntacticRepresentation { + + String getSyntacticRepresentation(); + + + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java index a5b5340d8..53d4980b9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java @@ -31,7 +31,7 @@ * @author david.carral@tu-dresden.de * @author Irina Dragoste */ -public interface Literal extends SyntaxObject { +public interface Literal extends SyntaxObject, SyntacticRepresentation { boolean isNegated(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index aaa126fa6..82a4126bb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -29,7 +29,7 @@ * @author Irina Dragoste * */ -public interface Predicate { +public interface Predicate extends SyntacticRepresentation { /** * The name of the Predicate. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java index a471a1cab..ebc937fed 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Rule extends SyntaxObject, Statement { +public interface Rule extends SyntaxObject, Statement, SyntacticRepresentation { /** * Returns the conjunction of head literals (the consequence of the rule). diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java index 1cf3fdbb8..11c2ef6e8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java @@ -27,7 +27,7 @@ * @author david.carral@tu-dresden.de * @author Markus Krötzsch */ -public interface Term { +public interface Term extends SyntacticRepresentation { /** * Returns the name this term. The name uniquely identifies terms of the same diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java index 86e97a061..140c6312e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java @@ -31,10 +31,10 @@ public class AbstractConstantImpl extends AbstractTermImpl implements AbstractConstant { /** - * Instantiates a {@code ConstantImpl} object with the name {@code name}. + * Instantiates a {@code ConstantImpl} object with the name + * {@code name}. 
* - * @param name - * cannot be a blank String (null, empty or whitespace). + * @param name cannot be a blank String (null, empty or whitespace). */ public AbstractConstantImpl(final String name) { super(name); @@ -44,9 +44,13 @@ public AbstractConstantImpl(final String name) { public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - + + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getConstantString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java index 80bd418e6..a912bb6f6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java @@ -92,9 +92,13 @@ public boolean equals(final Object obj) { && this.getArguments().equals(other.getArguments()); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getLiteralString(this); + return getSyntacticRepresentation(); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index c28374ad7..3295d7d1a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -85,9 +85,13 @@ public Iterator iterator() { return getLiterals().iterator(); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + 
@Override public String toString() { - return Serializer.getConjunctionString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index a40eae7b7..ce0edbf1d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -83,9 +83,13 @@ public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getDataSourceDeclarationString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index 0f7d85135..9242ae321 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -58,14 +58,18 @@ public String getLexicalValue() { return this.lexicalValue; } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getConstantString(this); + return getSyntacticRepresentation(); } @Override public String getName() { - return "\"" + lexicalValue.replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" + datatype + ">"; + return Serializer.getDatatypeConstantName(this); } @Override diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java index a570bd615..33092db87 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java @@ -44,8 +44,12 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getExistentialVarString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index 63a931e99..2fc3d3d81 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -50,7 +50,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return Serializer.getLiteralString(this) + "."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index 7e8657216..31ca5e440 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public 
String getName() { - return "\"" + string.replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + lang; + return Serializer.getLanguageStringConstantName(this); } @Override @@ -82,9 +82,13 @@ public boolean equals(Object obj) { return this.string.equals(other.getString()) && this.lang.equals(other.getLanguageTag()); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getConstantString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java index d6b1d2e9b..0b4e44640 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java @@ -46,8 +46,12 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getNamedNullString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java index 27d632c6d..85e9230df 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java @@ -85,9 +85,13 @@ public boolean equals(Object obj) { return this.arity == other.getArity() && this.name.equals(other.getName()); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return 
Serializer.getPredicateString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java index c83e79a30..8eabef7a1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java @@ -103,9 +103,14 @@ public boolean equals(final Object obj) { return this.head.equals(other.getHead()) && this.body.equals(other.getBody()); } + + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getRuleString(this); + return getSyntacticRepresentation(); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 8ab8b7455..68bf4e407 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -50,11 +50,11 @@ private Serializer() { } - public static String getRuleString(Rule rule) { - return getConjunctionString(rule.getHead()) + " :- " + getConjunctionString(rule.getBody()) + "."; + public static String getString(Rule rule) { + return getString(rule.getHead()) + " :- " + getString(rule.getBody()) + "."; } - public static String getLiteralString(Literal literal) { + public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { stringBuilder.append("~"); @@ -67,39 +67,55 @@ public static String getLiteralString(Literal literal) { } else { stringBuilder.append(", "); } - stringBuilder.append(term); + 
stringBuilder.append(term.getSyntacticRepresentation()); } stringBuilder.append(")"); return stringBuilder.toString(); } - public static String getConstantString(Constant constant) { + public static String getString(Fact fact) { + final StringBuilder stringBuilder = new StringBuilder(""); + stringBuilder.append(fact.getPredicate().getName()).append("("); + boolean first = true; + for (final Term term : fact.getArguments()) { + if (first) { + first = false; + } else { + stringBuilder.append(", "); + } + stringBuilder.append(term.getSyntacticRepresentation()); + } + stringBuilder.append(")."); + return stringBuilder.toString(); + } + + public static String getString(Constant constant) { return constant.getName(); } - public static String getExistentialVarString(ExistentialVariable existentialVariable) { + public static String getString(ExistentialVariable existentialVariable) { return "!" + existentialVariable.getName(); } - public static String getUniversalVarString(UniversalVariable universalVariable) { + public static String getString(UniversalVariable universalVariable) { return "?" 
+ universalVariable.getName(); } - public static String getNamedNullString(NamedNull namedNull) { + public static String getString(NamedNull namedNull) { return "_" + namedNull.getName(); } - public static String getPredicateString(Predicate predicate) { + public static String getString(Predicate predicate) { return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; } - public static String getDataSourceDeclarationString(DataSourceDeclaration dataSourceDeclaration) { + public static String getString(DataSourceDeclaration dataSourceDeclaration) { return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + dataSourceDeclaration.getPredicate().getArity() + ") : " + dataSourceDeclaration.getDataSource().toConfigString() + " ."; } - public static String getConjunctionString(Conjunction conjunction) { + public static String getString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; for (final Literal literal : conjunction.getLiterals()) { @@ -108,9 +124,19 @@ public static String getConjunctionString(Conjunction conjunc } else { stringBuilder.append(", "); } - stringBuilder.append(getLiteralString(literal)); + stringBuilder.append(getString(literal)); } return stringBuilder.toString(); } + public static String getLanguageStringConstantName(LanguageStringConstant languageStringConstant) { + return "\"" + languageStringConstant.getString().replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + + languageStringConstant.getLanguageTag(); + } + + public static String getDatatypeConstantName(DatatypeConstant datatypeConstant) { + return "\"" + datatypeConstant.getLexicalValue().replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" + + datatypeConstant.getDatatype() + ">"; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java index 838eab7f3..82493488d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java @@ -44,8 +44,12 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + @Override public String toString() { - return Serializer.getUniversalVarString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index d355e17f2..89a530237 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; @@ -129,6 +130,15 @@ public boolean equals(Object obj) { return predicate.equals(other.predicate); } + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + + @Override + public String toString() { + return getSyntacticRepresentation(); + } + } /** @@ -740,19 +750,18 @@ public void onStatementsAdded(List statementsAdded) { // TODO more elaborate 
materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementsAdded(statementsAdded); + + // updateCorrectnessOnStatementsAdded(statementsAdded); updateCorrectness(); } - @Override public void onStatementAdded(Statement statementAdded) { // TODO more elaborate materialisation state handling updateReasonerToKnowledgeBaseChanged(); - - //updateCorrectnessOnStatementAdded(statementAdded); + + // updateCorrectnessOnStatementAdded(statementAdded); updateCorrectness(); } @@ -766,9 +775,9 @@ private void updateReasonerToKnowledgeBaseChanged() { private void updateCorrectness() { if (this.reasonerState == ReasonerState.KB_CHANGED) { - + final boolean noRules = this.knowledgeBase.getRules().isEmpty(); - this.correctness = noRules? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; + this.correctness = noRules ? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; } } From 08642b206b0046927facba9a75796251f80f2b78 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 11 Nov 2019 18:06:35 +0100 Subject: [PATCH 0607/1255] added unit test for checking bug#104 --- .../vlog4j/owlapi/OwlAxiomToRulesConverterTest.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java index 6e10442ac..badeed248 100644 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java @@ -631,6 +631,19 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } + + /* + * A \sqsubseteq <1 .R + */ + @Test(expected = OwlFeatureNotSupportedException.class) + public void testSubClassOfMaxCardinality() { + + OWLClassExpression maxCard = df.getOWLObjectMaxCardinality(1, pR); + 
OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, maxCard ); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } @Ignore public void test() { From 6849afdeaadddee075b633f98a96cbf0f04fbb5e Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 11 Nov 2019 19:23:53 +0100 Subject: [PATCH 0608/1255] added more coverage tests --- .../vlog4j/core/model/api/DataSourceDeclaration.java | 2 +- .../semanticweb/vlog4j/core/model/api/Entity.java | 2 +- .../semanticweb/vlog4j/core/model/api/Literal.java | 4 +++- .../semanticweb/vlog4j/core/model/api/Predicate.java | 2 +- .../org/semanticweb/vlog4j/core/model/api/Rule.java | 2 +- .../org/semanticweb/vlog4j/core/model/api/Term.java | 2 +- .../vlog4j/core/model/NegativeLiteralImplTest.java | 11 +++++++++++ .../vlog4j/core/model/PositiveLiteralImplTest.java | 12 ++++++++++++ .../vlog4j/core/model/PredicateImplTest.java | 6 ++++++ 9 files changed, 37 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java index dbd97c360..357c85ed8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement, SyntacticRepresentation{ +public interface DataSourceDeclaration extends Statement, Entity{ /** * Returns the {@link Predicate} that this source applies to. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index a68806024..e8b2bfcd9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -23,7 +23,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Serializer; -public interface SyntacticRepresentation { +public interface Entity { String getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java index 53d4980b9..fab5c530a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java @@ -22,6 +22,8 @@ import java.util.List; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for literals. 
A positive literal is simply an atomic formula, i.e., * a formula of the form P(t1,...,tn) where P is a {@link Predicate} of arity n @@ -31,7 +33,7 @@ * @author david.carral@tu-dresden.de * @author Irina Dragoste */ -public interface Literal extends SyntaxObject, SyntacticRepresentation { +public interface Literal extends SyntaxObject, Entity { boolean isNegated(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index 82a4126bb..87bd036c7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -29,7 +29,7 @@ * @author Irina Dragoste * */ -public interface Predicate extends SyntacticRepresentation { +public interface Predicate extends Entity { /** * The name of the Predicate. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java index ebc937fed..0eb6cc325 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Rule extends SyntaxObject, Statement, SyntacticRepresentation { +public interface Rule extends SyntaxObject, Statement, Entity { /** * Returns the conjunction of head literals (the consequence of the rule). 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java index 11c2ef6e8..582493c69 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java @@ -27,7 +27,7 @@ * @author david.carral@tu-dresden.de * @author Markus Krötzsch */ -public interface Term extends SyntacticRepresentation { +public interface Term extends Entity { /** * Returns the name this term. The name uniquely identifies terms of the same diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java index 8feba86e3..d22881e84 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java @@ -142,4 +142,15 @@ public void termSizeMatchesPredicateArity() { Expressions.makeUniversalVariable("X")); } + @Test + public void negativeLiteralTostringTest() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Predicate predicateP = new PredicateImpl("p", 2); + final Literal atom2 = Expressions.makeNegativeLiteral("p", x, c); + final Literal atom3 = new NegativeLiteralImpl(predicateP, Arrays.asList(x, c)); + assertEquals("~p(?X, c)", atom2.toString()); + assertEquals("~p(?X, c)", atom3.toString()); + + } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java index c358cecdf..ff1d5bbe3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java @@ -139,4 +139,16 @@ public void termSizeMatchesPredicateArity() { Expressions.makeUniversalVariable("X")); } + @Test + public void positiveLiteralTostringTest() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Predicate predicateP = new PredicateImpl("p", 2); + final Literal atom2 = Expressions.makePositiveLiteral("p", x, c); + final Literal atom3 = new PositiveLiteralImpl(predicateP, Arrays.asList(x, c)); + assertEquals("p(?X, c)", atom2.toString()); + assertEquals("p(?X, c)", atom3.toString()); + + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java index 30862eb5e..21f184915 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java @@ -73,4 +73,10 @@ public void arityZero() { new PredicateImpl("p", 0); } + @Test + public void predicateToStringTest() { + final Predicate p1 = new PredicateImpl("p", 1); + assertEquals(" Predicate [ name= p, arity= 1]", p1.toString()); + } + } From 02eae7f5413132fc1ff87930db4e7782b48165cc Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 11 Nov 2019 19:24:39 +0100 Subject: [PATCH 0609/1255] added more coverage tests --- .../java/org/semanticweb/vlog4j/core/model/api/Entity.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index e8b2bfcd9..c6cfd12f5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -20,13 
+20,10 @@ * #L% */ - import org.semanticweb.vlog4j.core.model.implementation.Serializer; public interface Entity { - - String getSyntacticRepresentation(); - + String getSyntacticRepresentation(); } From fa461307bb6acfa3cc38ba227e4fd7fcc1f64ccd Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 12 Nov 2019 00:13:25 +0100 Subject: [PATCH 0610/1255] added toStrings for DataSourceDeclarations --- .../vlog4j/core/model/api/DataSource.java | 2 +- .../core/model/implementation/Serializer.java | 5 ++-- .../implementation/CsvFileDataSource.java | 5 ++++ .../implementation/InMemoryDataSource.java | 6 ++++ .../implementation/RdfFileDataSource.java | 5 ++++ .../SparqlQueryResultDataSource.java | 6 ++++ .../core/model/DataSourceDeclarationTest.java | 30 +++++++++++++++++++ 7 files changed, 55 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java index abaaa9d03..d085716e6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java @@ -26,7 +26,7 @@ * @author Irina Dragoste * */ -public interface DataSource { +public interface DataSource extends Entity { /** * Constructs a String representation of the data source. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 68bf4e407..0af0baefe 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -24,6 +24,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; @@ -110,9 +111,7 @@ public static String getString(Predicate predicate) { } public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" - + dataSourceDeclaration.getPredicate().getArity() + ") : " - + dataSourceDeclaration.getDataSource().toConfigString() + " ."; + return dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } public static String getString(Conjunction conjunction) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java index 6ffafa6d9..2fa42eb07 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java @@ -73,4 +73,9 @@ public String toString() { return "CsvFileDataSource [csvFile=" + getFile() + "]"; } + @Override + public String getSyntacticRepresentation() { + return "load-csv(\"" + getFile() + "\") ."; + } + } diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 0a363f734..c602b613a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -101,4 +101,10 @@ public String toConfigString() { return null; } + @Override + public String getSyntacticRepresentation() { + // TODO Auto-generated method stub + return null; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java index 5b8ac21d1..ee5cc49ee 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java @@ -71,4 +71,9 @@ public String toString() { return "RdfFileDataSource [rdfFile=" + getFile() + "]"; } + @Override + public String getSyntacticRepresentation() { + return "load-rdf(\"" + getFile() + "\") ."; + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index c4f83be33..29578fbf2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -164,4 +164,10 @@ public String toString() { + ", queryBody=" + queryBody + "]"; } + @Override + public String getSyntacticRepresentation() { + return "Sparql(\"" + 
endpoint + "\"" + ", \"" + queryVariables + "\"" + + ", \"" + queryBody + "\") ."; + } + } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 3091216d5..ff34b2c5a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -22,15 +22,23 @@ import static org.junit.Assert.*; +import java.io.File; +import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.util.Arrays; +import java.util.LinkedHashSet; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; public class DataSourceDeclarationTest { @@ -74,4 +82,26 @@ public void dataSourceDecalarationToStringTest() throws MalformedURLException { DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); assertEquals(dataSourceDeclaration1.toString(), dataSourceDeclaration2.toString()); } + + @Test + public void DataSourceDeclarationToStringTest() throws IOException { + final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; + final File unzippedRdfFile = 
new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); + Predicate predicate1 = Expressions.makePredicate("p", 3); + Predicate predicate2 = Expressions.makePredicate("q", 1); + final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/"), + "var", "?var wdt:P31 wd:Q5 ."); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + unzippedCsvFileDataSource); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, + unzippedRdfFileDataSource); + assertEquals("Sparql(\"https://example.org/\", \"var\", \"?var wdt:P31 wd:Q5 .\") .", + dataSourceDeclaration1.toString()); + assertEquals("load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); + assertEquals("load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); + + } } From 3f82460a78382d9773aca8ef3c4ca676e0206d73 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 12 Nov 2019 19:57:33 +0100 Subject: [PATCH 0611/1255] added EntityTest class for tostring roundtrip tests --- .../vlog4j/core/model/api/Entity.java | 7 + .../core/model/implementation/Serializer.java | 4 +- .../SparqlQueryResultDataSource.java | 32 ++-- .../core/model/DataSourceDeclarationTest.java | 10 +- .../vlog4j/syntax/parser/EntityTest.java | 152 ++++++++++++++++++ 5 files changed, 182 insertions(+), 23 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index c6cfd12f5..73b5d05ec 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -22,6 +22,13 @@ import org.semanticweb.vlog4j.core.model.implementation.Serializer; +/** + * Interface with getSyntacticRepresentation method that must be extended by any + * data model that can be parsed in order to obtain its correct parsable string. + * + * @author Ali Elhalawati + * + */ public interface Entity { String getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 0af0baefe..a0673a697 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -111,7 +111,9 @@ public static String getString(Predicate predicate) { } public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); + return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + + dataSourceDeclaration.getPredicate().getArity() + "): " + + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } public static String getString(Conjunction conjunction) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index 29578fbf2..ace59318b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -46,12 +46,11 @@ public class SparqlQueryResultDataSource extends VLogDataSource { /** * Creates a data source from answers to a remote SPARQL query. * - * @param endpoint - * web location of the resource the query will be evaluated on - * @param queryVariables - * comma-separated list of SPARQL variable names (without leading ? or $) - * @param queryBody - * content of the WHERE clause in the SPARQL query + * @param endpoint web location of the resource the query will be + * evaluated on + * @param queryVariables comma-separated list of SPARQL variable names (without + * leading ? or $) + * @param queryBody content of the WHERE clause in the SPARQL query */ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc @@ -70,15 +69,15 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl /** * Creates a data source from answers to a remote SPARQL query. * - * @param endpoint - * the web location of the resource the query will be evaluated on. - * @param queryVariables - * the variables of the query, in the given order. The variable at - * each position in the ordered set will be mapped to its - * correspondent query answer term at the same position. - * @param queryBody - * the content of the WHERE clause in the SPARQL query. Must - * not contain {@code newline} characters ({@code "\n")}. + * @param endpoint the web location of the resource the query will be + * evaluated on. + * @param queryVariables the variables of the query, in the given order. The + * variable at each position in the ordered set will be + * mapped to its correspondent query answer term at the + * same position. + * @param queryBody the content of the WHERE clause in the SPARQL + * query. Must not contain {@code newline} characters + * ({@code "\n")}. 
*/ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc @@ -166,8 +165,7 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return "Sparql(\"" + endpoint + "\"" + ", \"" + queryVariables + "\"" - + ", \"" + queryBody + "\") ."; + return "sparql(" + "<" + endpoint + ">" + ", \"" + queryVariables + "\"" + ", \"" + queryBody + "\") ."; } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index ff34b2c5a..e9ab57f5e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -89,8 +89,8 @@ public void DataSourceDeclarationToStringTest() throws IOException { final File unzippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); Predicate predicate1 = Expressions.makePredicate("p", 3); Predicate predicate2 = Expressions.makePredicate("q", 1); - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/"), - "var", "?var wdt:P31 wd:Q5 ."); + final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( + new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); @@ -98,10 +98,10 @@ public void DataSourceDeclarationToStringTest() throws IOException { unzippedCsvFileDataSource); final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, unzippedRdfFileDataSource); - 
assertEquals("Sparql(\"https://example.org/\", \"var\", \"?var wdt:P31 wd:Q5 .\") .", + assertEquals("@source p(3): sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", dataSourceDeclaration1.toString()); - assertEquals("load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); - assertEquals("load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); + assertEquals("@source q(1): load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); + assertEquals("@source q(1): load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java new file mode 100644 index 000000000..e669cf066 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -0,0 +1,152 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.net.URL; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.FactImpl; +import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.*; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +public class EntityTest { + final Variable x = 
Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Variable z = Expressions.makeExistentialVariable("Z"); + final Variable y2 = Expressions.makeUniversalVariable("Y"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Constant c = Expressions.makeAbstractConstant("c"); + final AbstractConstantImpl f = new AbstractConstantImpl("f"); + final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); + final DatatypeConstantImpl data = new DatatypeConstantImpl("data", "http://example.org/mystring"); + final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, s); + final Predicate p = Expressions.makePredicate("p", 2); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(f, s)); + final Fact f2 = Expressions.makeFact("p", Arrays.asList(data, d)); + final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, + NegativeLiteral, PositiveLiteral4); + final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); + final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); + + @Test + public void 
factToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + KnowledgeBase kb2 = new KnowledgeBase(); + kb.addStatement(f1); + kb2.addStatement(f2); + assertEquals(f1.toString(), kb.getFacts().get(0).toString()); + assertEquals(f2.toString(), kb2.getFacts().get(0).toString()); + } + + @Test + public void literalToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(rule1); + assertEquals(headAtom1.toString(), rule1.getHead().getLiterals().get(0).toString()); + } + + @Test + public void conjunctionToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(rule1); + assertEquals(bodyConjunction.toString(), rule2.getBody().toString()); + } + + @Test + public void predicateToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(rule1); + assertEquals(bodyConjunction.getLiterals().get(0).getPredicate().toString(), p.toString()); + } + + @Test + public void ruleToStringRoundTripTest() { + KnowledgeBase kb = new KnowledgeBase(); + KnowledgeBase kb2 = new KnowledgeBase(); + kb.addStatement(rule1); + kb2.addStatement(rule2); + assertEquals(kb.getRules().get(0).toString(), rule1.toString()); + assertEquals(kb2.getRules().get(0).toString(), rule2.toString()); + } + + @Test + public void dataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + final String INPUT_FOLDER = "src/test/data/input/"; + final String csvFile = INPUT_FOLDER + "file.csv"; + final File unzippedRdfFile = new File(INPUT_FOLDER + "file.nt"); + Predicate predicate1 = Expressions.makePredicate("p", 3); + Predicate predicate2 = Expressions.makePredicate("q", 1); + final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( + new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + final RdfFileDataSource 
unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + unzippedCsvFileDataSource); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate1, + unzippedRdfFileDataSource); + System.out.println(dataSourceDeclaration1.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); + } + +} From c436c57e403f9967af2552d1d93a41d7dcc2cb47 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 13 Nov 2019 04:07:16 +0100 Subject: [PATCH 0612/1255] moved getSyntacticRepresentation to interfaces --- .../core/model/api/AbstractConstant.java | 7 +++++++ .../vlog4j/core/model/api/Conjunction.java | 9 ++++++++- .../core/model/api/DataSourceDeclaration.java | 9 ++++++++- .../core/model/api/DatatypeConstant.java | 7 +++++++ .../vlog4j/core/model/api/Entity.java | 11 +++++----- .../core/model/api/ExistentialVariable.java | 9 ++++++++- .../vlog4j/core/model/api/Fact.java | 7 +++++++ .../model/api/LanguageStringConstant.java | 7 +++++++ .../vlog4j/core/model/api/Literal.java | 5 +++++ .../vlog4j/core/model/api/NamedNull.java | 12 ++++++++--- .../vlog4j/core/model/api/Predicate.java | 7 +++++++ .../vlog4j/core/model/api/Rule.java | 7 +++++++ .../core/model/api/UniversalVariable.java | 7 +++++++ .../implementation/AbstractConstantImpl.java | 4 ---- .../implementation/AbstractLiteralImpl.java | 4 ---- .../model/implementation/ConjunctionImpl.java | 4 ---- .../DataSourceDeclarationImpl.java | 4 ---- .../implementation/DatatypeConstantImpl.java | 4 ---- .../ExistentialVariableImpl.java | 4 ---- .../core/model/implementation/FactImpl.java | 2 +- .../LanguageStringConstantImpl.java | 4 ---- 
.../model/implementation/NamedNullImpl.java | 4 ---- .../model/implementation/PredicateImpl.java | 4 ---- .../core/model/implementation/RuleImpl.java | 5 ----- .../core/model/implementation/Serializer.java | 20 ++----------------- .../implementation/UniversalVariableImpl.java | 4 ---- .../core/model/DataSourceDeclarationTest.java | 13 ------------ 27 files changed, 96 insertions(+), 88 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java index cc7783ba1..760b74358 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -34,4 +36,9 @@ default TermType getType() { return TermType.ABSTRACT_CONSTANT; } + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java index 4d02bc9b9..550d3fa4e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java @@ -22,6 +22,8 @@ import java.util.List; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of * (negated or positive) atomic formulas that are connected with logical AND. 
@@ -30,7 +32,7 @@ * @author Markus Krötzsch * */ -public interface Conjunction extends Iterable, SyntaxObject { +public interface Conjunction extends Iterable, SyntaxObject, Entity { /** * Returns the list of literals that are part of this conjunction. @@ -39,4 +41,9 @@ public interface Conjunction extends Iterable, SyntaxObjec */ List getLiterals(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java index 357c85ed8..954574e1f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -27,7 +29,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement, Entity{ +public interface DataSourceDeclaration extends Statement, Entity { /** * Returns the {@link Predicate} that this source applies to. 
@@ -42,4 +44,9 @@ public interface DataSourceDeclaration extends Statement, Entity{ * @return data source specification */ DataSource getDataSource(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java index 390e7a909..3702b7b52 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -57,4 +59,9 @@ default TermType getType() { * @return a non-null string */ String getLexicalValue(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index 73b5d05ec..8126a4ca3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -20,17 +20,18 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /** - * Interface with getSyntacticRepresentation method that must be extended by any - * data model that can be parsed in order to obtain its correct parsable string. + * Interface for every parsable Data model that has a string representation * * @author Ali Elhalawati * */ public interface Entity { - + /** + * returns the parsable String representation of an Entity. 
+ * + * @return non-empty String + */ String getSyntacticRepresentation(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java index ad57ed712..d573a7850 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -32,5 +34,10 @@ public interface ExistentialVariable extends Variable { default TermType getType() { return TermType.EXISTENTIAL_VARIABLE; } - + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java index d9943f75e..36e7c1fef 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -29,4 +31,9 @@ */ public interface Fact extends PositiveLiteral, Statement { + @Override + default String getSyntacticRepresentation() { + return Serializer.getFactString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index a1c24427f..b3694d565 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -60,4 +62,9 @@ default String getDatatype() { * @return a non-empty string */ String getLanguageTag(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java index fab5c530a..791615697 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java @@ -52,4 +52,9 @@ public interface Literal extends SyntaxObject, Entity { */ List getArguments(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java index 4ca7fbecb..5413b9365 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /* * #%L * VLog4j Core Components @@ -28,11 +30,15 @@ * @author david.carral@tu-dresden.de */ public interface NamedNull extends Term { - + @Override default TermType getType() { return TermType.NAMED_NULL; } - -} + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index 87bd036c7..7b604f289 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -47,4 +49,9 @@ public interface Predicate extends Entity { */ int getArity(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java index 0eb6cc325..804524b80 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /*- * #%L * VLog4j Core Components @@ -46,4 +48,9 @@ public interface Rule extends SyntaxObject, Statement, Entity { */ Conjunction getBody(); + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java index 7827886b8..975620a03 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.api; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /* * #%L * VLog4j Core Components @@ -32,4 +34,9 @@ public interface 
UniversalVariable extends Variable { default TermType getType() { return TermType.UNIVERSAL_VARIABLE; } + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java index 140c6312e..0820e16de 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java @@ -45,10 +45,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java index a912bb6f6..5e2d141a3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java @@ -92,10 +92,6 @@ public boolean equals(final Object obj) { && this.getArguments().equals(other.getArguments()); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java index 3295d7d1a..8167f43c2 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java @@ -85,10 +85,6 @@ public Iterator iterator() { return getLiterals().iterator(); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java index ce0edbf1d..2a905dcbb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java @@ -83,10 +83,6 @@ public T accept(StatementVisitor statementVisitor) { return statementVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index 9242ae321..d50693640 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -58,10 +58,6 @@ public String getLexicalValue() { return this.lexicalValue; } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java index 33092db87..685d273a9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java @@ -44,10 +44,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index 2fc3d3d81..d22794133 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -50,7 +50,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return Serializer.getString(this); + return getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index 31ca5e440..bb0df83c8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -82,10 +82,6 @@ public boolean equals(Object obj) { return this.string.equals(other.getString()) && this.lang.equals(other.getLanguageTag()); } - public String 
getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java index 0b4e44640..5b3a0adc6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java @@ -46,10 +46,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java index 85e9230df..38fac8686 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java @@ -85,10 +85,6 @@ public boolean equals(Object obj) { return this.arity == other.getArity() && this.name.equals(other.getName()); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java index 8eabef7a1..4ffbae0de 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java @@ 
-103,11 +103,6 @@ public boolean equals(final Object obj) { return this.head.equals(other.getHead()) && this.body.equals(other.getBody()); } - - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a0673a697..b862c797d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.implementation; -import java.util.List; - /*- * #%L * VLog4j Core Components @@ -24,7 +22,6 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; @@ -36,7 +33,6 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.api.Variable; /** * A utility class with static methods to obtain the correct parsable string @@ -74,20 +70,8 @@ public static String getString(Literal literal) { return stringBuilder.toString(); } - public static String getString(Fact fact) { - final StringBuilder stringBuilder = new StringBuilder(""); - stringBuilder.append(fact.getPredicate().getName()).append("("); - boolean first = true; - for (final Term term : fact.getArguments()) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } 
- stringBuilder.append(term.getSyntacticRepresentation()); - } - stringBuilder.append(")."); - return stringBuilder.toString(); + public static String getFactString(Fact fact) { + return getString(fact) + "."; } public static String getString(Constant constant) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java index 82493488d..e21cf3e9c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java @@ -44,10 +44,6 @@ public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override public String toString() { return getSyntacticRepresentation(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index e9ab57f5e..982dcd8b3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -70,19 +70,6 @@ public void equalityTest() throws MalformedURLException { assertFalse(dataSourceDeclaration1.equals(null)); // written like this for recording coverage properly } - @Test - public void dataSourceDecalarationToStringTest() throws MalformedURLException { - DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - Predicate predicate1 = Expressions.makePredicate("p", 3); - DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - DataSource 
dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - Predicate predicate2 = Expressions.makePredicate("p", 3); - DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); - assertEquals(dataSourceDeclaration1.toString(), dataSourceDeclaration2.toString()); - } - @Test public void DataSourceDeclarationToStringTest() throws IOException { final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; From b2c72128dfc7ddf1b41d6c912d5fcf741f94f2ab Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 13 Nov 2019 04:10:32 +0100 Subject: [PATCH 0613/1255] added javadoc --- .../java/org/semanticweb/vlog4j/core/model/api/Entity.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index 8126a4ca3..cc58a7806 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -21,14 +21,14 @@ */ /** - * Interface for every parsable Data model that has a string representation + * Interface for every parsable data model that has a string representation * * @author Ali Elhalawati * */ public interface Entity { /** - * returns the parsable String representation of an Entity. + * returns the parsable string representation of an Entity. 
* * @return non-empty String */ From 4eefbe4e87e105356d69867fb1c009ebebd1970a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Wed, 13 Nov 2019 18:42:32 +0100 Subject: [PATCH 0614/1255] add getExtensionSize method and some tests --- .../vlog4j/core/reasoner/Reasoner.java | 3 + .../reasoner/implementation/VLogReasoner.java | 18 +- .../implementation/ExtensionSizeTest.java | 206 ++++++++++++++++++ 3 files changed, 226 insertions(+), 1 deletion(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 6c274b820..e37561483 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -377,6 +377,9 @@ public static Reasoner getInstance() { */ long queryAnswerSize(PositiveLiteral query); + // TODO add javadoc, examples + long getExtensionSize(PositiveLiteral literal); + // TODO add examples to query javadoc /** * Evaluates an atomic ({@code query}), and returns the number of implicit facts diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 33f1f428f..cb0b75f75 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -608,7 +608,7 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - int result = -1; + long result = -1; try { result = 
this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { @@ -621,6 +621,22 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { return result; } + @Override + public long getExtensionSize(PositiveLiteral literal) { + validateNotClosed(); + validateKBLoaded("Querying is not alowed before reasoner is loaded!"); + + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(literal); + + long result = 0; + try { + result = this.vLog.getExtensionSize(this.vLog.getPredicateId(vLogAtom.getPredicate())); + } catch (NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } + return result; + } + @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java new file mode 100644 index 000000000..8e16694b0 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java @@ -0,0 +1,206 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +public class ExtensionSizeTest { + + private static final Predicate predP = Expressions.makePredicate("P", 1); + private static final Predicate predQ = Expressions.makePredicate("Q", 1); + private static final Predicate predR = Expressions.makePredicate("R", 2); + private static final Variable x = Expressions.makeUniversalVariable("x"); + private static final Variable y = Expressions.makeExistentialVariable("y"); + private static final Constant c = Expressions.makeAbstractConstant("c"); + private static final Constant d = Expressions.makeAbstractConstant("d"); + private static final Constant e = Expressions.makeAbstractConstant("e"); + private static final Constant f = Expressions.makeAbstractConstant("f"); + + private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); + private static final PositiveLiteral Qx = Expressions.makePositiveLiteral(predQ, x); + private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); + private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); + private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); + private static final Conjunction conPx = Expressions.makeConjunction(Px); + + private static final Rule QxPx = 
Expressions.makeRule(Qx, Px); + private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); + + private static final Fact factPc = Expressions.makeFact(predP, c); + private static final Fact factPd = Expressions.makeFact(predP, d); + + private static final Fact factQe = Expressions.makeFact(predQ, e); + private static final Fact factQf = Expressions.makeFact(predQ, f); + + private static final PositiveLiteral Rdy = Expressions.makePositiveLiteral(predR, d, y); + private static final PositiveLiteral Rxd = Expressions.makePositiveLiteral(predR, x, d); + private static final PositiveLiteral Rxe = Expressions.makePositiveLiteral(predR, x, e); + + @Test + public void noFactsnoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + } + } + + @Test + public void noFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(0, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void noFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(0, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsNoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(0, reasoner.getExtensionSize(Qx)); + 
assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void qFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void qFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.getExtensionSize(Px)); + assertEquals(2, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsQFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + 
assertEquals(4, reasoner.getExtensionSize(Qx)); + assertEquals(0, reasoner.getExtensionSize(Rxy)); + } + } + + @Test + public void pFactsQFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(4, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Rxy)); + + assertEquals(2, reasoner.getExtensionSize(Rdy)); + assertEquals(2, reasoner.getExtensionSize(Rxe)); + } + } + + @Test + public void pFactsQFactsExistentialAndUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.getExtensionSize(Px)); + assertEquals(6, reasoner.getExtensionSize(Qx)); + assertEquals(2, reasoner.getExtensionSize(Rxy)); + + assertEquals(2, reasoner.getExtensionSize(Rdy)); + assertEquals(2, reasoner.getExtensionSize(Rxd)); + } + } +} From 90ab9519265aa0cf4f03b3910ff9bb8f3f13762c Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 15 Nov 2019 15:23:54 +0100 Subject: [PATCH 0615/1255] modified round trip test --- .../vlog4j/syntax/parser/EntityTest.java | 44 ++++--------------- 1 file changed, 8 insertions(+), 36 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index e669cf066..bcf472459 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -86,44 +86,17 @@ public class EntityTest { final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); @Test - public void 
factToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - KnowledgeBase kb2 = new KnowledgeBase(); - kb.addStatement(f1); - kb2.addStatement(f2); - assertEquals(f1.toString(), kb.getFacts().get(0).toString()); - assertEquals(f2.toString(), kb2.getFacts().get(0).toString()); - } - - @Test - public void literalToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(rule1); - assertEquals(headAtom1.toString(), rule1.getHead().getLiterals().get(0).toString()); - } - - @Test - public void conjunctionToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(rule1); - assertEquals(bodyConjunction.toString(), rule2.getBody().toString()); + public void factToStringRoundTripTest() throws ParsingException { + assertEquals(RuleParser.parseFact(f1.toString()), RuleParser.parseFact("p(f, \"Test\"@en).")); + assertEquals(RuleParser.parseFact(f2.toString()), + RuleParser.parseFact("p(\"data\"^^, d).")); } @Test - public void predicateToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(rule1); - assertEquals(bodyConjunction.getLiterals().get(0).getPredicate().toString(), p.toString()); - } - - @Test - public void ruleToStringRoundTripTest() { - KnowledgeBase kb = new KnowledgeBase(); - KnowledgeBase kb2 = new KnowledgeBase(); - kb.addStatement(rule1); - kb2.addStatement(rule2); - assertEquals(kb.getRules().get(0).toString(), rule1.toString()); - assertEquals(kb2.getRules().get(0).toString(), rule2.toString()); + public void ruleToStringRoundTripTest() throws ParsingException { + assertEquals(RuleParser.parseRule(rule1.toString()), RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?X, ?Y).")); + assertEquals(RuleParser.parseRule(rule2.toString()), + RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).")); } @Test @@ -143,7 +116,6 @@ public void dataSourceDeclarationToStringParsingTest() throws ParsingException, 
unzippedCsvFileDataSource); final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); - System.out.println(dataSourceDeclaration1.toString()); RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); From 85c4addcd9d459218f90b51db8b7544c186cec8d Mon Sep 17 00:00:00 2001 From: alloka Date: Sun, 17 Nov 2019 17:18:22 +0100 Subject: [PATCH 0616/1255] added javadoc and removed unused iomports --- .../vlog4j/core/model/api/Entity.java | 2 +- .../implementation/DatatypeConstantImpl.java | 2 +- .../LanguageStringConstantImpl.java | 2 +- .../core/model/implementation/Serializer.java | 96 ++++++++++++++++++- .../core/model/DataSourceDeclarationTest.java | 8 +- .../vlog4j/core/model/FactTest.java | 3 +- .../core/model/PositiveLiteralImplTest.java | 1 + .../vlog4j/core/model/PredicateImplTest.java | 2 +- .../vlog4j/core/model/TermImplTest.java | 5 +- .../vlog4j/syntax/parser/EntityTest.java | 6 +- 10 files changed, 105 insertions(+), 22 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java index cc58a7806..d5fd0306e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java @@ -28,7 +28,7 @@ */ public interface Entity { /** - * returns the parsable string representation of an Entity. + * Returns the parsable string representation of an Entity. 
* * @return non-empty String */ diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java index d50693640..a366f7fed 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java @@ -65,7 +65,7 @@ public String toString() { @Override public String getName() { - return Serializer.getDatatypeConstantName(this); + return Serializer.getConstantName(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java index bb0df83c8..8d0bb26f3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public String getName() { - return Serializer.getLanguageStringConstantName(this); + return Serializer.getConstantName(this); } @Override diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index b862c797d..45b0d5fcd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -42,15 +42,31 @@ * */ public final class Serializer { - + /** + * Constructor. 
+ */ private Serializer() { } + /** + * Creates a String representation of a given {@link Rule}. Example: "p(?X) :- + * q(?X,?Y)." + * + * @param rule a {@link Rule} + * @return String representation corresponding to a given {@link Rule}. + */ public static String getString(Rule rule) { return getString(rule.getHead()) + " :- " + getString(rule.getBody()) + "."; } + /** + * Creates a String representation of a given {@link Literal}. Example: + * "~q(?X,?Y)" + * + * @param literal a {@link Literal} + * @return String representation corresponding to a given {@link Literal}. + */ public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { @@ -70,36 +86,92 @@ public static String getString(Literal literal) { return stringBuilder.toString(); } + /** + * Creates a String representation of a given {@link Fact}. Example: "q(a)." + * + * @param fact a {@link Fact} + * @return String representation corresponding to a given {@link Fact}. + */ public static String getFactString(Fact fact) { return getString(fact) + "."; } + /** + * Creates a String representation of a given {@link Constant}. Example: "c" + * + * @param constant a {@link Constant} + * @return String representation corresponding to a given {@link Constant}. + */ public static String getString(Constant constant) { return constant.getName(); } + /** + * Creates a String representation of a given {@link ExistentialVariable}. + * Example: "!X" + * + * @param existentialVariable a {@link ExistentialVariable} + * @return String representation corresponding to a given + * {@link ExistentialVariable}. + */ public static String getString(ExistentialVariable existentialVariable) { return "!" + existentialVariable.getName(); } + /** + * Creates a String representation of a given {@link UniversalVariable}. 
+ * Example: "?X" + * + * @param universalVariable a {@link UniversalVariable} + * @return String representation corresponding to a given + * {@link UniversalVariable}. + */ public static String getString(UniversalVariable universalVariable) { return "?" + universalVariable.getName(); } + /** + * Creates a String representation of a given {@link NamedNull}. Example: "_123" + * + * @param namedNull a {@link NamedNull} + * @return String representation corresponding to a given {@link NamedNull}. + */ public static String getString(NamedNull namedNull) { return "_" + namedNull.getName(); } + /** + * Creates a String representation of a given {@link Predicate}. Example: "p(2)" + * + * @param predicate a {@link Predicate} + * @return String representation corresponding to a given {@link Predicate}. + */ public static String getString(Predicate predicate) { - return " Predicate [ name= " + predicate.getName() + ", arity= " + predicate.getArity() + "]"; + return predicate.getName() + "(" + predicate.getArity() + ")"; } + /** + * Creates a String representation of a given {@link DataSourceDeclaration}. + * Example: "@source p(3): sparql(, "var", "?var + * wdt:P31 wd:Q5 .") ." + * + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @return String representation corresponding to a given + * {@link DataSourceDeclaration}. + */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" + dataSourceDeclaration.getPredicate().getArity() + "): " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } + /** + * Creates a String representation of a given {@link Conjunction}. Example: + * "p(?X,?Y), ~q(a,?Z)" + * + * @param conjunction a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. 
+ */ public static String getString(Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; @@ -114,12 +186,28 @@ public static String getString(Conjunction conjunction) { return stringBuilder.toString(); } - public static String getLanguageStringConstantName(LanguageStringConstant languageStringConstant) { + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. Example: ""Test"@en" + * + * @param languageStringConstant a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(LanguageStringConstant languageStringConstant) { return "\"" + languageStringConstant.getString().replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + languageStringConstant.getLanguageTag(); } - public static String getDatatypeConstantName(DatatypeConstant datatypeConstant) { + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant}. Example: ""c"^^" + * + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(DatatypeConstant datatypeConstant) { return "\"" + datatypeConstant.getLexicalValue().replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" + datatypeConstant.getDatatype() + ">"; } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 982dcd8b3..9ef7ef77d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -19,21 +19,19 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; -import java.util.Arrays; -import java.util.LinkedHashSet; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index 7f39dd875..a94cdf86d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -19,8 +19,7 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.util.Arrays; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java index ff1d5bbe3..7c8d791a7 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java @@ -24,6 +24,7 @@ import static org.junit.Assert.assertNotEquals; import java.util.Arrays; + import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java index 21f184915..0c750ebb1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java @@ -76,7 +76,7 @@ public void arityZero() { @Test public void predicateToStringTest() { final Predicate p1 = new PredicateImpl("p", 1); - assertEquals(" Predicate [ name= p, arity= 1]", p1.toString()); + assertEquals("p(1)", p1.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 2a218a6a2..5f6006c43 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -19,8 +19,9 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index bcf472459..718630cf5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -19,8 +19,7 @@ * limitations under the License. * #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.io.File; import java.io.IOException; @@ -44,13 +43,10 @@ import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.*; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; From b49418f62343c554be0771265d1c4f743a40f7f4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 18 Nov 2019 12:02:36 +0100 Subject: [PATCH 0617/1255] fix variable name --- 
.../semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java index 13dab444a..09e788f6a 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java @@ -222,8 +222,8 @@ void startAxiomConversion() { */ void addSubClassAxiom(final OWLClassExpression subClass, final OWLClassExpression superClass) { if (subClass instanceof OWLObjectOneOf) { - final OWLObjectOneOf subClassInstaceOf = (OWLObjectOneOf) subClass; - subClassInstaceOf.individuals().forEach(individual -> visitClassAssertionAxiom(individual, superClass)); + final OWLObjectOneOf subClassObjectOneOf = (OWLObjectOneOf) subClass; + subClassObjectOneOf.individuals().forEach(individual -> visitClassAssertionAxiom(individual, superClass)); } else { this.startAxiomConversion(); From 0e21460295d302e8c0fd625d611112b52c12a7ce Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 01:04:49 +0100 Subject: [PATCH 0618/1255] fixed DataypeConstants and added their roundtrip tests --- .../core/model/implementation/Serializer.java | 98 +++++++++++++------ .../reasoner/implementation/VLogReasoner.java | 10 +- .../vlog4j/core/model/TermImplTest.java | 5 +- .../vlog4j/syntax/parser/EntityTest.java | 1 + .../vlog4j/syntax/parser/RuleParserTest.java | 18 ++++ 5 files changed, 94 insertions(+), 38 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 45b0d5fcd..bf205b95f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -30,6 +30,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; @@ -42,6 +43,16 @@ * */ public final class Serializer { + public static final String negativeIdentifier = "~"; + public static final String comma = ","; + public static final String dot = "."; + public static final String existentialIdentifier = "!"; + public static final String universalIdentifier = "?"; + public static final String namedNullIdentifier = "_"; + public static final String openBracket = "("; + public static final String closeBracket = ")"; + public static final String ruleSeparator = ":-"; + /** * Constructor. */ @@ -50,55 +61,58 @@ private Serializer() { } /** - * Creates a String representation of a given {@link Rule}. Example: "p(?X) :- - * q(?X,?Y)." + * Creates a String representation of a given {@link Rule}. * - * @param rule a {@link Rule} + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. + * */ public static String getString(Rule rule) { - return getString(rule.getHead()) + " :- " + getString(rule.getBody()) + "."; + return getString(rule.getHead()) + " " + ruleSeparator + " " + getString(rule.getBody()) + dot; } /** - * Creates a String representation of a given {@link Literal}. Example: - * "~q(?X,?Y)" + * Creates a String representation of a given {@link Literal}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
* @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { - stringBuilder.append("~"); + stringBuilder.append(negativeIdentifier); } - stringBuilder.append(literal.getPredicate().getName()).append("("); + stringBuilder.append(literal.getPredicate().getName()).append(openBracket); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { first = false; } else { - stringBuilder.append(", "); + stringBuilder.append(comma + " "); } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(")"); + stringBuilder.append(closeBracket); return stringBuilder.toString(); } /** - * Creates a String representation of a given {@link Fact}. Example: "q(a)." + * Creates a String representation of a given {@link Fact}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ public static String getFactString(Fact fact) { - return getString(fact) + "."; + return getString(fact) + dot; } /** - * Creates a String representation of a given {@link Constant}. Example: "c" + * Creates a String representation of a given {@link Constant}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. */ @@ -106,69 +120,74 @@ public static String getString(Constant constant) { return constant.getName(); } + public static String getString(DatatypeConstant constant) { + return getShortConstantName(constant); + } + /** * Creates a String representation of a given {@link ExistentialVariable}. - * Example: "!X" * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
* @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. */ public static String getString(ExistentialVariable existentialVariable) { - return "!" + existentialVariable.getName(); + return existentialIdentifier + existentialVariable.getName(); } /** * Creates a String representation of a given {@link UniversalVariable}. - * Example: "?X" * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. */ public static String getString(UniversalVariable universalVariable) { - return "?" + universalVariable.getName(); + return universalIdentifier + universalVariable.getName(); } /** - * Creates a String representation of a given {@link NamedNull}. Example: "_123" + * Creates a String representation of a given {@link NamedNull}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ public static String getString(NamedNull namedNull) { - return "_" + namedNull.getName(); + return namedNullIdentifier + namedNull.getName(); } /** - * Creates a String representation of a given {@link Predicate}. Example: "p(2)" + * Creates a String representation of a given {@link Predicate}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + "(" + predicate.getArity() + ")"; + return predicate.getName() + openBracket + predicate.getArity() + closeBracket; } /** * Creates a String representation of a given {@link DataSourceDeclaration}. 
- * Example: "@source p(3): sparql(, "var", "?var - * wdt:P31 wd:Q5 .") ." * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + "(" - + dataSourceDeclaration.getPredicate().getArity() + "): " + return "@source " + dataSourceDeclaration.getPredicate().getName() + openBracket + + dataSourceDeclaration.getPredicate().getArity() + closeBracket + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } /** - * Creates a String representation of a given {@link Conjunction}. Example: - * "p(?X,?Y), ~q(a,?Z)" + * Creates a String representation of a given {@link Conjunction}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -179,7 +198,7 @@ public static String getString(Conjunction conjunction) { if (first) { first = false; } else { - stringBuilder.append(", "); + stringBuilder.append(comma + " "); } stringBuilder.append(getString(literal)); } @@ -188,8 +207,9 @@ public static String getString(Conjunction conjunction) { /** * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. Example: ""Test"@en" + * {@link LanguageStringConstant}. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. 
@@ -201,8 +221,26 @@ public static String getConstantName(LanguageStringConstant languageStringConsta /** * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant}. Example: ""c"^^" + * {@link DatatypeConstant} without an IRI. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getShortConstantName(DatatypeConstant datatypeConstant) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { + return "\"" + datatypeConstant.getLexicalValue() + "\""; + } else { + return datatypeConstant.getLexicalValue(); + } + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 89a530237..6bda54351 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -11,6 +11,7 @@ import java.util.Map; import java.util.Set; +import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; @@ -28,7 +29,6 @@ import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; @@ -130,13 +130,11 @@ public boolean equals(Object obj) { return predicate.equals(other.predicate); } + @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - @Override - public String toString() { - return getSyntacticRepresentation(); + throw new NotImplementedException( + "This method is not implemented for type LocalFactsDataSourceDeclaration"); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 5f6006c43..c34cefd8b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -26,6 +26,7 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.TermType; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -141,8 +142,8 @@ public void abstractConstantToStringTest() { @Test public void datatypeConstantToStringTest() { - DatatypeConstantImpl c = new DatatypeConstantImpl("c", "http://example.org/mystring"); - assertEquals("\"c\"^^", c.toString()); + DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); + assertEquals("\"c\"", c.toString()); } @Test diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 718630cf5..8d6121926 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -31,6 +31,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index de2fc73b6..7896177e2 100644 --- 
a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -411,4 +412,21 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } + @Test + public void DatatypeConstantgRoundTripTest() throws ParsingException { + DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); + DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); + DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); + DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); + assertEquals(datatypeConstantString, + RuleParser.parseFact("p(\"data\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantInteger, + RuleParser.parseFact("p(\"1\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantFloat, + RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantDouble, + RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); + + } + } From 613f7b9e4f3d101d582bca538db805f64791a254 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 12:53:55 +0100 Subject: [PATCH 0619/1255] added roundtrip tests for datatypeConstants --- .../core/model/implementation/Serializer.java | 
10 ++++++++- .../vlog4j/core/model/TermImplTest.java | 6 ++--- .../vlog4j/syntax/parser/EntityTest.java | 22 +++++++++++++++++-- 3 files changed, 32 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index bf205b95f..cfe79dc6f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -232,7 +232,15 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { return "\"" + datatypeConstant.getLexicalValue() + "\""; } else { - return datatypeConstant.getLexicalValue(); + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_FLOAT)) { + return datatypeConstant.getLexicalValue(); + } else { + return getConstantName(datatypeConstant); + } + } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index c34cefd8b..81bf20c96 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -97,10 +97,10 @@ public void abstractConstantGetterTest() { @Test public void datatypeConstantGetterTest() { - DatatypeConstant c = new DatatypeConstantImpl("c", "http://example.org/mystring"); + DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); assertEquals("c", c.getLexicalValue()); - 
assertEquals("http://example.org/mystring", c.getDatatype()); - assertEquals("\"c\"^^", c.getName()); + assertEquals("http://www.w3.org/2001/XMLSchema#string", c.getDatatype()); + assertEquals("\"c\"^^", c.getName()); assertEquals(TermType.DATATYPE_CONSTANT, c.getType()); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 8d6121926..62039c855 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -37,6 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -62,7 +63,7 @@ public class EntityTest { final Constant c = Expressions.makeAbstractConstant("c"); final AbstractConstantImpl f = new AbstractConstantImpl("f"); final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); - final DatatypeConstantImpl data = new DatatypeConstantImpl("data", "http://example.org/mystring"); + final DatatypeConstantImpl data = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); @@ -86,7 +87,7 @@ public class EntityTest { public void factToStringRoundTripTest() throws ParsingException { assertEquals(RuleParser.parseFact(f1.toString()), RuleParser.parseFact("p(f, \"Test\"@en).")); 
assertEquals(RuleParser.parseFact(f2.toString()), - RuleParser.parseFact("p(\"data\"^^, d).")); + RuleParser.parseFact("p(\"data\"^^, d).")); } @Test @@ -96,6 +97,23 @@ public void ruleToStringRoundTripTest() throws ParsingException { RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).")); } + @Test + public void DatatypeConstantgRoundTripTest() throws ParsingException { + DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); + DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); + DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); + DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); + assertEquals("\"data\"", RuleParser.parseFact("p(\"data\"^^).") + .getArguments().get(0).toString()); + assertEquals("1", RuleParser.parseFact("p(\"1\"^^).").getArguments() + .get(0).toString()); + assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).").getArguments() + .get(0).toString()); + assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).") + .getArguments().get(0).toString()); + + } + @Test public void dataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); From 81bca7044ceea22a6dc4f595a41c916636f200df Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 15:21:45 +0100 Subject: [PATCH 0620/1255] added roundtrip tests --- .../core/model/implementation/Serializer.java | 17 +- .../vlog4j/syntax/parser/EntityTest.java | 202 ++++++++++++------ .../vlog4j/syntax/parser/RuleParserTest.java | 3 + 3 files changed, 150 insertions(+), 72 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index cfe79dc6f..f53ca7088 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -49,8 +49,8 @@ public final class Serializer { public static final String existentialIdentifier = "!"; public static final String universalIdentifier = "?"; public static final String namedNullIdentifier = "_"; - public static final String openBracket = "("; - public static final String closeBracket = ")"; + public static final String openParentheses = "("; + public static final String closeParentheses = ")"; public static final String ruleSeparator = ":-"; /** @@ -84,7 +84,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(negativeIdentifier); } - stringBuilder.append(literal.getPredicate().getName()).append(openBracket); + stringBuilder.append(literal.getPredicate().getName()).append(openParentheses); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -94,7 +94,7 @@ public static String getString(Literal literal) { } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closeBracket); + stringBuilder.append(closeParentheses); return stringBuilder.toString(); } @@ -167,7 +167,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + openBracket + predicate.getArity() + closeBracket; + return predicate.getName() + openParentheses + predicate.getArity() + closeParentheses; } /** @@ -179,8 +179,8 @@ public static String getString(Predicate predicate) { * {@link DataSourceDeclaration}. 
*/ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + openBracket - + dataSourceDeclaration.getPredicate().getArity() + closeBracket + ": " + return "@source " + dataSourceDeclaration.getPredicate().getName() + openParentheses + + dataSourceDeclaration.getPredicate().getArity() + closeParentheses + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } @@ -234,8 +234,7 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_FLOAT)) { + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { return datatypeConstant.getLexicalValue(); } else { return getConstantName(datatypeConstant); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 62039c855..88d01b2f7 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -25,25 +25,20 @@ import java.io.IOException; import java.net.URL; import java.util.Arrays; -import java.util.List; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; import 
org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; @@ -55,85 +50,166 @@ import org.semanticweb.vlog4j.parser.RuleParser; public class EntityTest { - final Variable x = Expressions.makeUniversalVariable("X"); - final Variable y = Expressions.makeUniversalVariable("Y"); - final Variable z = Expressions.makeExistentialVariable("Z"); + final Variable y2 = Expressions.makeUniversalVariable("Y"); final Constant d = Expressions.makeAbstractConstant("d"); - final Constant c = Expressions.makeAbstractConstant("c"); - final AbstractConstantImpl f = new AbstractConstantImpl("f"); + final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); - final DatatypeConstantImpl data = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); - final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); - final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); - final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); - final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); - final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x); - final PositiveLiteral positiveLiteral3 = 
Expressions.makePositiveLiteral("q", x, d); - final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); - final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, s); - final Predicate p = Expressions.makePredicate("p", 2); - final Fact f1 = Expressions.makeFact(p, Arrays.asList(f, s)); - final Fact f2 = Expressions.makeFact("p", Arrays.asList(data, d)); - final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, - NegativeLiteral, PositiveLiteral4); - final Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); - final Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); - final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); - final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); - final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); + + // final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", + // x, c); + // final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", + // y2, x); + // final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", + // x, d); + // final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", + // x, d); + // final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", + // c, s); + // final List LiteralList = Arrays.asList(positiveLiteral1, + // positiveLiteral2, positiveLiteral3, + // NegativeLiteral, PositiveLiteral4); + + // final Conjunction bodyConjunction = new + // ConjunctionImpl<>(LiteralList); + + // final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); @Test - public void factToStringRoundTripTest() throws ParsingException { - assertEquals(RuleParser.parseFact(f1.toString()), RuleParser.parseFact("p(f, \"Test\"@en).")); - assertEquals(RuleParser.parseFact(f2.toString()), - RuleParser.parseFact("p(\"data\"^^, d).")); + public void 
languageStringConstantToStringRoundTripTest() throws ParsingException { + LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); + Predicate p = Expressions.makePredicate("p", 1); + Fact f3 = Expressions.makeFact(p, Arrays.asList(s)); + assertEquals(f3, RuleParser.parseFact(f3.toString())); + } + + public void AbstractConstantToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl f = new AbstractConstantImpl("f"); + AbstractConstantImpl a = new AbstractConstantImpl("1"); + Predicate p = Expressions.makePredicate("p", 1); + Fact f1 = Expressions.makeFact(p, Arrays.asList(f)); + Fact f2 = Expressions.makeFact(p, Arrays.asList(a)); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + assertEquals(f2, RuleParser.parseFact(f2.toString())); } @Test public void ruleToStringRoundTripTest() throws ParsingException { - assertEquals(RuleParser.parseRule(rule1.toString()), RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?X, ?Y).")); - assertEquals(RuleParser.parseRule(rule2.toString()), - RuleParser.parseRule("q(?X, !Z) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).")); + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + Variable y = Expressions.makeUniversalVariable("Y"); + Variable z = Expressions.makeExistentialVariable("Z"); + PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + } + + @Test + public void ConjunctionToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); 
+ Variable x = Expressions.makeUniversalVariable("X"); + Variable y = Expressions.makeUniversalVariable("Y"); + Variable z = Expressions.makeExistentialVariable("Z"); + NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); + PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals(rule1, RuleParser.parseRule(rule1.toString())); } @Test - public void DatatypeConstantgRoundTripTest() throws ParsingException { - DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); - DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); - DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); - DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); - assertEquals("\"data\"", RuleParser.parseFact("p(\"data\"^^).") + public void LiteralToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + Variable z = Expressions.makeExistentialVariable("Z"); + NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Rule rule1 = Expressions.makeRule(headAtom1, atom1); + assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + } + + @Test + public void DatatypeDoubleConstantToStringRoundTripTest() throws ParsingException { + String shortDoubleConstant = "12.345E67"; + assertEquals(shortDoubleConstant, + RuleParser.parseFact("p(\"12.345E67\"^^).").getArguments() 
+ .get(0).toString()); + assertEquals(shortDoubleConstant, RuleParser.parseFact("p(12.345E67).").getArguments().get(0).toString()); + } + + @Test + public void DatatypeFloatConstantToStringRoundTripTest() throws ParsingException { + String floatConstant = "\"0.5\"^^"; + assertEquals(floatConstant, RuleParser.parseFact("p(\"0.5\"^^).") .getArguments().get(0).toString()); - assertEquals("1", RuleParser.parseFact("p(\"1\"^^).").getArguments() - .get(0).toString()); - assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).").getArguments() - .get(0).toString()); - assertEquals("0.5", RuleParser.parseFact("p(\"0.5\"^^).") + } + + @Test + public void DatatypeStringConstantToStringRoundTripTest() throws ParsingException { + String shortStringConstant = "\"data\""; + assertEquals(shortStringConstant, RuleParser + .parseFact("p(\"data\"^^).").getArguments().get(0).toString()); + assertEquals(shortStringConstant, RuleParser.parseFact("p(\"data\").").getArguments().get(0).toString()); + } + + @Test + public void DatatypeIntegerConstantToStringRoundTripTest() throws ParsingException { + String shortIntegerConstant = "1"; + assertEquals(shortIntegerConstant, RuleParser.parseFact("p(\"1\"^^).") .getArguments().get(0).toString()); + assertEquals(shortIntegerConstant, RuleParser.parseFact("p(1).").getArguments().get(0).toString()); + } + @Test + public void DatatypeDecimalToStringRoundTripTest() throws ParsingException { + String shortDecimalConstant = "0.23"; + assertEquals(shortDecimalConstant, + RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0) + .toString()); + assertEquals(shortDecimalConstant, RuleParser.parseFact("p(0.23).").getArguments().get(0).toString()); } @Test - public void dataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - final String INPUT_FOLDER = "src/test/data/input/"; - final 
String csvFile = INPUT_FOLDER + "file.csv"; - final File unzippedRdfFile = new File(INPUT_FOLDER + "file.nt"); Predicate predicate1 = Expressions.makePredicate("p", 3); - Predicate predicate2 = Expressions.makePredicate("q", 1); - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( - new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); - final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, - unzippedCsvFileDataSource); - final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate1, - unzippedRdfFileDataSource); + SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), + "var", "?var wdt:P31 wd:Q5 ."); + DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); + assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + String INPUT_FOLDER = "src/test/data/input/"; + File unzippedRdfFile = new File(INPUT_FOLDER + "file.nt"); + Predicate predicate1 = Expressions.makePredicate("p", 3); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedRdfFileDataSource); + 
RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + String INPUT_FOLDER = "src/test/data/input/"; + String csvFile = INPUT_FOLDER + "file.csv"; + Predicate predicate1 = Expressions.makePredicate("q", 1); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedCsvFileDataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 7896177e2..c5d4b7ecb 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -418,6 +418,7 @@ public void DatatypeConstantgRoundTripTest() throws ParsingException { DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); + DatatypeConstantImpl datatypeConstantDecimal = new DatatypeConstantImpl("0.23", PrefixDeclarations.XSD_DECIMAL); assertEquals(datatypeConstantString, RuleParser.parseFact("p(\"data\"^^).").getArguments().get(0)); assertEquals(datatypeConstantInteger, @@ -426,6 +427,8 @@ public void DatatypeConstantgRoundTripTest() throws ParsingException { 
RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); assertEquals(datatypeConstantDouble, RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); + assertEquals(datatypeConstantDecimal, + RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0)); } From 4613849a75bed0ca9905a44b998dc64f54b98924 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 15:22:52 +0100 Subject: [PATCH 0621/1255] added roundtrip tests --- .../vlog4j/syntax/parser/EntityTest.java | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 88d01b2f7..176277d5c 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -51,30 +51,6 @@ public class EntityTest { - final Variable y2 = Expressions.makeUniversalVariable("Y"); - final Constant d = Expressions.makeAbstractConstant("d"); - - final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); - - // final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", - // x, c); - // final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", - // y2, x); - // final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", - // x, d); - // final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", - // x, d); - // final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", - // c, s); - // final List LiteralList = Arrays.asList(positiveLiteral1, - // positiveLiteral2, positiveLiteral3, - // NegativeLiteral, PositiveLiteral4); - - // final Conjunction bodyConjunction = new - // ConjunctionImpl<>(LiteralList); - - // final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); - @Test public void 
languageStringConstantToStringRoundTripTest() throws ParsingException { LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); From 0289cb3e8932803dbe326e1fd0b2438982afe409 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 16:36:00 +0100 Subject: [PATCH 0622/1255] modified test methods names --- .../vlog4j/syntax/parser/EntityTest.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 176277d5c..b0460357f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -25,7 +25,6 @@ import java.io.IOException; import java.net.URL; import java.util.Arrays; - import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; @@ -59,7 +58,7 @@ public void languageStringConstantToStringRoundTripTest() throws ParsingExceptio assertEquals(f3, RuleParser.parseFact(f3.toString())); } - public void AbstractConstantToStringRoundTripTest() throws ParsingException { + public void abstractConstantToStringRoundTripTest() throws ParsingException { AbstractConstantImpl f = new AbstractConstantImpl("f"); AbstractConstantImpl a = new AbstractConstantImpl("1"); Predicate p = Expressions.makePredicate("p", 1); @@ -85,7 +84,7 @@ public void ruleToStringRoundTripTest() throws ParsingException { } @Test - public void ConjunctionToStringRoundTripTest() throws ParsingException { + public void conjunctionToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); Variable x = Expressions.makeUniversalVariable("X"); Variable y = Expressions.makeUniversalVariable("Y"); @@ -100,7 +99,7 @@ public void ConjunctionToStringRoundTripTest() 
throws ParsingException { } @Test - public void LiteralToStringRoundTripTest() throws ParsingException { + public void literalToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); Variable x = Expressions.makeUniversalVariable("X"); Variable z = Expressions.makeExistentialVariable("Z"); @@ -111,7 +110,7 @@ public void LiteralToStringRoundTripTest() throws ParsingException { } @Test - public void DatatypeDoubleConstantToStringRoundTripTest() throws ParsingException { + public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingException { String shortDoubleConstant = "12.345E67"; assertEquals(shortDoubleConstant, RuleParser.parseFact("p(\"12.345E67\"^^).").getArguments() @@ -120,14 +119,14 @@ public void DatatypeDoubleConstantToStringRoundTripTest() throws ParsingExceptio } @Test - public void DatatypeFloatConstantToStringRoundTripTest() throws ParsingException { + public void datatypeFloatConstantToStringRoundTripTest() throws ParsingException { String floatConstant = "\"0.5\"^^"; assertEquals(floatConstant, RuleParser.parseFact("p(\"0.5\"^^).") .getArguments().get(0).toString()); } @Test - public void DatatypeStringConstantToStringRoundTripTest() throws ParsingException { + public void datatypeStringConstantToStringRoundTripTest() throws ParsingException { String shortStringConstant = "\"data\""; assertEquals(shortStringConstant, RuleParser .parseFact("p(\"data\"^^).").getArguments().get(0).toString()); @@ -135,7 +134,7 @@ public void DatatypeStringConstantToStringRoundTripTest() throws ParsingExceptio } @Test - public void DatatypeIntegerConstantToStringRoundTripTest() throws ParsingException { + public void datatypeIntegerConstantToStringRoundTripTest() throws ParsingException { String shortIntegerConstant = "1"; assertEquals(shortIntegerConstant, RuleParser.parseFact("p(\"1\"^^).") .getArguments().get(0).toString()); @@ -143,7 +142,7 @@ public void DatatypeIntegerConstantToStringRoundTripTest() throws 
ParsingExcepti } @Test - public void DatatypeDecimalToStringRoundTripTest() throws ParsingException { + public void datatypeDecimalToStringRoundTripTest() throws ParsingException { String shortDecimalConstant = "0.23"; assertEquals(shortDecimalConstant, RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0) From 0a6b92523d70bb2a507758cbf13f9a0443c82795 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 19 Nov 2019 17:04:02 +0100 Subject: [PATCH 0623/1255] added getSyntacticRepresentation for InMemoryDataSource --- .../core/model/implementation/Serializer.java | 8 ++++---- .../implementation/InMemoryDataSource.java | 16 +++++++++++++--- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index f53ca7088..c489abfc1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -50,7 +50,7 @@ public final class Serializer { public static final String universalIdentifier = "?"; public static final String namedNullIdentifier = "_"; public static final String openParentheses = "("; - public static final String closeParentheses = ")"; + public static final String closingParentheses = ")"; public static final String ruleSeparator = ":-"; /** @@ -94,7 +94,7 @@ public static String getString(Literal literal) { } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closeParentheses); + stringBuilder.append(closingParentheses); return stringBuilder.toString(); } @@ -167,7 +167,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. 
*/ public static String getString(Predicate predicate) { - return predicate.getName() + openParentheses + predicate.getArity() + closeParentheses; + return predicate.getName() + openParentheses + predicate.getArity() + closingParentheses; } /** @@ -180,7 +180,7 @@ public static String getString(Predicate predicate) { */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { return "@source " + dataSourceDeclaration.getPredicate().getName() + openParentheses - + dataSourceDeclaration.getPredicate().getArity() + closeParentheses + ": " + + dataSourceDeclaration.getPredicate().getArity() + closingParentheses + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index c602b613a..ca59534dd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -96,13 +96,23 @@ public String[][] getData() { * Returns null to indicate that this {@link DataSource} cannot be passed to * VLog in a configuration string. 
*/ + @Override - public String toConfigString() { - return null; + public String getSyntacticRepresentation() { + + String message = "\\\\ This data source holds facts: \n"; + StringBuilder facts = new StringBuilder(""); + facts.append(message); + for (int i = 0; i < this.getData().length; i++) { + for (int j = 0; j < data[i].length; j++) { + facts.append(data[i][j] + "\n"); + } + } + return facts.toString(); } @Override - public String getSyntacticRepresentation() { + public String toConfigString() { // TODO Auto-generated method stub return null; } From e8d859eb4c6c1f1d4a9e6ba9e622e8321619cc38 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 20 Nov 2019 00:02:01 +0100 Subject: [PATCH 0624/1255] fixed grammer --- .../core/model/implementation/Serializer.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index c489abfc1..2b66ec484 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -49,8 +49,8 @@ public final class Serializer { public static final String existentialIdentifier = "!"; public static final String universalIdentifier = "?"; public static final String namedNullIdentifier = "_"; - public static final String openParentheses = "("; - public static final String closingParentheses = ")"; + public static final String openParenthesis = "("; + public static final String closingParenthesis = ")"; public static final String ruleSeparator = ":-"; /** @@ -84,7 +84,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(negativeIdentifier); } - stringBuilder.append(literal.getPredicate().getName()).append(openParentheses); + 
stringBuilder.append(literal.getPredicate().getName()).append(openParenthesis); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -94,7 +94,7 @@ public static String getString(Literal literal) { } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closingParentheses); + stringBuilder.append(closingParenthesis); return stringBuilder.toString(); } @@ -167,7 +167,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + openParentheses + predicate.getArity() + closingParentheses; + return predicate.getName() + openParenthesis + predicate.getArity() + closingParenthesis; } /** @@ -179,8 +179,8 @@ public static String getString(Predicate predicate) { * {@link DataSourceDeclaration}. */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + openParentheses - + dataSourceDeclaration.getPredicate().getArity() + closingParentheses + ": " + return "@source " + dataSourceDeclaration.getPredicate().getName() + openParenthesis + + dataSourceDeclaration.getPredicate().getArity() + closingParenthesis + ": " + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } From aafbdcd87e865f0e7c15dddb843368dcf61a0a0a Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 21 Nov 2019 22:56:16 +0100 Subject: [PATCH 0625/1255] added strings identifiers --- .../core/model/implementation/Serializer.java | 63 +++++++++++-------- .../implementation/InMemoryDataSource.java | 16 ++--- .../reasoner/implementation/VLogReasoner.java | 3 +- 3 files changed, 46 insertions(+), 36 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 2b66ec484..92cfe77ff 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -43,15 +43,22 @@ * */ public final class Serializer { - public static final String negativeIdentifier = "~"; - public static final String comma = ","; - public static final String dot = "."; - public static final String existentialIdentifier = "!"; - public static final String universalIdentifier = "?"; - public static final String namedNullIdentifier = "_"; - public static final String openParenthesis = "("; - public static final String closingParenthesis = ")"; - public static final String ruleSeparator = ":-"; + public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String COMMA = ", "; + public static final String DOT = "."; + public static final String EXISTENTIAL_IDENTIFIER = "!"; + public static final String UNIVERSAL_IDENTIFIER = "?"; + public static final String NAMEDNULL_IDENTIFIER = "_"; + public static final String OPEN_PARENTHESIS = "("; + public static final String CLOSING_PARENTHESIS = ")"; + public static final String RULE_SEPARATOR = " :- "; + public static final String AT = "@"; + public static final String SOURCE = "@source "; + public static final String COLON = ": "; + public static final String CARET = "^"; + public static final String LESS_THAN = "<"; + public static final String MORE_THAN = ">"; + public static final String ESCAPED_QUOTE = "\""; /** * Constructor. @@ -60,6 +67,10 @@ private Serializer() { } + private static String escape(String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\""); + } + /** * Creates a String representation of a given {@link Rule}. 
* @@ -69,7 +80,7 @@ private Serializer() { * */ public static String getString(Rule rule) { - return getString(rule.getHead()) + " " + ruleSeparator + " " + getString(rule.getBody()) + dot; + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + DOT; } /** @@ -82,19 +93,19 @@ public static String getString(Rule rule) { public static String getString(Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { - stringBuilder.append(negativeIdentifier); + stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(literal.getPredicate().getName()).append(openParenthesis); + stringBuilder.append(literal.getPredicate().getName()).append(OPEN_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { first = false; } else { - stringBuilder.append(comma + " "); + stringBuilder.append(COMMA); } stringBuilder.append(term.getSyntacticRepresentation()); } - stringBuilder.append(closingParenthesis); + stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); } @@ -106,7 +117,7 @@ public static String getString(Literal literal) { * @return String representation corresponding to a given {@link Fact}. */ public static String getFactString(Fact fact) { - return getString(fact) + dot; + return getString(fact) + DOT; } /** @@ -133,7 +144,7 @@ public static String getString(DatatypeConstant constant) { * {@link ExistentialVariable}. */ public static String getString(ExistentialVariable existentialVariable) { - return existentialIdentifier + existentialVariable.getName(); + return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); } /** @@ -145,7 +156,7 @@ public static String getString(ExistentialVariable existentialVariable) { * {@link UniversalVariable}. 
*/ public static String getString(UniversalVariable universalVariable) { - return universalIdentifier + universalVariable.getName(); + return UNIVERSAL_IDENTIFIER + universalVariable.getName(); } /** @@ -156,7 +167,7 @@ public static String getString(UniversalVariable universalVariable) { * @return String representation corresponding to a given {@link NamedNull}. */ public static String getString(NamedNull namedNull) { - return namedNullIdentifier + namedNull.getName(); + return NAMEDNULL_IDENTIFIER + namedNull.getName(); } /** @@ -167,7 +178,7 @@ public static String getString(NamedNull namedNull) { * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { - return predicate.getName() + openParenthesis + predicate.getArity() + closingParenthesis; + return predicate.getName() + OPEN_PARENTHESIS + predicate.getArity() + CLOSING_PARENTHESIS; } /** @@ -179,8 +190,8 @@ public static String getString(Predicate predicate) { * {@link DataSourceDeclaration}. */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return "@source " + dataSourceDeclaration.getPredicate().getName() + openParenthesis - + dataSourceDeclaration.getPredicate().getArity() + closingParenthesis + ": " + return SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS + + dataSourceDeclaration.getPredicate().getArity() + CLOSING_PARENTHESIS + COLON + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } @@ -198,7 +209,7 @@ public static String getString(Conjunction conjunction) { if (first) { first = false; } else { - stringBuilder.append(comma + " "); + stringBuilder.append(COMMA); } stringBuilder.append(getString(literal)); } @@ -215,7 +226,7 @@ public static String getString(Conjunction conjunction) { * {@link LanguageStringConstant}. 
*/ public static String getConstantName(LanguageStringConstant languageStringConstant) { - return "\"" + languageStringConstant.getString().replace("\\", "\\\\").replace("\"", "\\\"") + "\"@" + return ESCAPED_QUOTE + escape(languageStringConstant.getString()) + ESCAPED_QUOTE + AT + languageStringConstant.getLanguageTag(); } @@ -230,7 +241,7 @@ public static String getConstantName(LanguageStringConstant languageStringConsta */ public static String getShortConstantName(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return "\"" + datatypeConstant.getLexicalValue() + "\""; + return ESCAPED_QUOTE + datatypeConstant.getLexicalValue() + ESCAPED_QUOTE; } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -253,8 +264,8 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. 
*/ public static String getConstantName(DatatypeConstant datatypeConstant) { - return "\"" + datatypeConstant.getLexicalValue().replace("\\", "\\\\").replace("\"", "\\\"") + "\"^^<" - + datatypeConstant.getDatatype() + ">"; + return ESCAPED_QUOTE + escape(datatypeConstant.getLexicalValue()) + ESCAPED_QUOTE + CARET + CARET + LESS_THAN + + datatypeConstant.getDatatype() + MORE_THAN; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index ca59534dd..8c005a782 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -92,28 +92,28 @@ public String[][] getData() { } } - /** - * Returns null to indicate that this {@link DataSource} cannot be passed to - * VLog in a configuration string. - */ - @Override public String getSyntacticRepresentation() { - String message = "\\\\ This data source holds facts: \n"; + String message = "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"; StringBuilder facts = new StringBuilder(""); facts.append(message); for (int i = 0; i < this.getData().length; i++) { for (int j = 0; j < data[i].length; j++) { - facts.append(data[i][j] + "\n"); + facts.append(data[i][j] + " "); } + facts.append("\n"); } return facts.toString(); } + /** + * Returns null to indicate that this {@link DataSource} cannot be passed to + * VLog in a configuration string. 
+ */ + @Override public String toConfigString() { - // TODO Auto-generated method stub return null; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 6bda54351..b0c123859 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -11,7 +11,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; @@ -133,7 +132,7 @@ public boolean equals(Object obj) { @Override public String getSyntacticRepresentation() { - throw new NotImplementedException( + throw new UnsupportedOperationException( "This method is not implemented for type LocalFactsDataSourceDeclaration"); } From 3db65a2da653181efa1d1b1dde7032cfc8bbff45 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 25 Nov 2019 14:53:00 +0100 Subject: [PATCH 0626/1255] rework after review: - make getters and setters and isValid methods public - print message and exit instead of throwing a RuntimeException --- .../client/picocli/PrintQueryResults.java | 30 ++--- .../org/vlog4j/client/picocli/SaveModel.java | 32 ++--- .../client/picocli/SaveQueryResults.java | 59 ++++----- .../picocli/VLog4jClientMaterialize.java | 115 +++++++++--------- 4 files changed, 118 insertions(+), 118 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java index cd32d547a..f80c226f2 100644 --- 
a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java @@ -30,7 +30,7 @@ */ public class PrintQueryResults { - static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true.\n Exiting the program."; + static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true."; /** * If true, Vlog4jClient will print the size of the query result. Mutually @@ -53,7 +53,7 @@ public class PrintQueryResults { public PrintQueryResults() { } - public PrintQueryResults(boolean sizeOnly, boolean complete) { + public PrintQueryResults(final boolean sizeOnly, final boolean complete) { this.sizeOnly = sizeOnly; this.complete = complete; } @@ -64,28 +64,28 @@ public PrintQueryResults(boolean sizeOnly, boolean complete) { * * @return @code{true} if configuration is valid. 
*/ - protected boolean isValid() { - return !sizeOnly || !complete; + public boolean isValid() { + return !this.sizeOnly || !this.complete; } - protected void printConfiguration() { - System.out.println(" --print-query-result-size: " + sizeOnly); - System.out.println(" --print-complete-query-result: " + complete); + public boolean isSizeOnly() { + return this.sizeOnly; } - protected boolean isSizeOnly() { - return sizeOnly; - } - - protected void setSizeOnly(boolean sizeOnly) { + public void setSizeOnly(final boolean sizeOnly) { this.sizeOnly = sizeOnly; } - protected boolean isComplete() { - return complete; + public boolean isComplete() { + return this.complete; } - protected void setComplete(boolean complete) { + public void setComplete(final boolean complete) { this.complete = complete; } + + void printConfiguration() { + System.out.println(" --print-query-result-size: " + this.sizeOnly); + System.out.println(" --print-complete-query-result: " + this.complete); + } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java index b74a9b43a..6cb05f4b7 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java @@ -32,8 +32,8 @@ */ public class SaveModel { - static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required.\nExiting the program."; - static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path.\nExiting the program."; + static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. 
Please check the path."; /** * If true, Vlog4jClient will save the model in {@code --output-model-directory} @@ -54,7 +54,7 @@ public class SaveModel { public SaveModel() { } - public SaveModel(boolean saveModel, String outputDir) { + public SaveModel(final boolean saveModel, final String outputDir) { this.saveModel = saveModel; this.outputModelDirectory = outputDir; } @@ -65,8 +65,8 @@ public SaveModel(boolean saveModel, String outputDir) { * * @return @code{true} if configuration is valid. */ - protected boolean isConfigurationValid() { - return !saveModel || (outputModelDirectory != null && !outputModelDirectory.isEmpty()); + public boolean isConfigurationValid() { + return !this.saveModel || ((this.outputModelDirectory != null) && !this.outputModelDirectory.isEmpty()); } /** @@ -74,17 +74,17 @@ protected boolean isConfigurationValid() { * * @return @code{true} if conditions are satisfied. */ - protected boolean isDirectoryValid() { - File file = new File(outputModelDirectory); + public boolean isDirectoryValid() { + final File file = new File(this.outputModelDirectory); return !file.exists() || file.isDirectory(); } /** * Create directory to store the model */ - public void mkdir() { - if (saveModel) { - File file = new File(outputModelDirectory); + void mkdir() { + if (this.saveModel) { + final File file = new File(this.outputModelDirectory); if (!file.exists()) { file.mkdirs(); } @@ -92,23 +92,23 @@ public void mkdir() { } public void printConfiguration() { - System.out.println(" --save-model: " + saveModel); - System.out.println(" --output-model-directory: " + outputModelDirectory); + System.out.println(" --save-model: " + this.saveModel); + System.out.println(" --output-model-directory: " + this.outputModelDirectory); } public boolean isSaveModel() { - return saveModel; + return this.saveModel; } - public void setSaveModel(boolean saveModel) { + public void setSaveModel(final boolean saveModel) { this.saveModel = saveModel; } public String 
getOutputModelDirectory() { - return outputModelDirectory; + return this.outputModelDirectory; } - public void setOutputModelDirectory(String outputModelDirectory) { + public void setOutputModelDirectory(final String outputModelDirectory) { this.outputModelDirectory = outputModelDirectory; } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java index a0c20f91f..735b63a4b 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java @@ -32,8 +32,8 @@ */ public class SaveQueryResults { - static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required.\nExiting the program."; - static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path.\nExiting the program."; + static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path."; /** * If true, Vlog4jClient will save the query result in @@ -56,7 +56,7 @@ public class SaveQueryResults { public SaveQueryResults() { } - public SaveQueryResults(boolean saveResults, String outputDir) { + public SaveQueryResults(final boolean saveResults, final String outputDir) { this.saveResults = saveResults; this.outputQueryResultDirectory = outputDir; } @@ -67,8 +67,9 @@ public SaveQueryResults(boolean saveResults, String outputDir) { * * @return @code{true} if configuration is valid. 
*/ - protected boolean isConfigurationValid() { - return !saveResults || (outputQueryResultDirectory != null && !outputQueryResultDirectory.isEmpty()); + public boolean isConfigurationValid() { + return !this.saveResults + || ((this.outputQueryResultDirectory != null) && !this.outputQueryResultDirectory.isEmpty()); } /** @@ -77,43 +78,43 @@ protected boolean isConfigurationValid() { * * @return @code{true} if conditions are satisfied. */ - protected boolean isDirectoryValid() { - File file = new File(outputQueryResultDirectory); + public boolean isDirectoryValid() { + final File file = new File(this.outputQueryResultDirectory); return !file.exists() || file.isDirectory(); } - /** - * Create directory to store query results if not present. It assumes that - * configuration and directory are valid. - */ - protected void mkdir() { - if (saveResults) { - File file = new File(outputQueryResultDirectory); - if (!file.exists()) { - file.mkdirs(); - } - } + public boolean isSaveResults() { + return this.saveResults; } - protected void printConfiguration() { - System.out.println(" --save-query-results: " + saveResults); - System.out.println(" --output-query-result-directory: " + outputQueryResultDirectory); + public void setSaveResults(final boolean saveResults) { + this.saveResults = saveResults; } - protected boolean isSaveResults() { - return saveResults; + public String getOutputQueryResultDirectory() { + return this.outputQueryResultDirectory; } - protected void setSaveResults(boolean saveResults) { - this.saveResults = saveResults; + public void setOutputQueryResultDirectory(final String outputQueryResultDirectory) { + this.outputQueryResultDirectory = outputQueryResultDirectory; } - protected String getOutputQueryResultDirectory() { - return outputQueryResultDirectory; + /** + * Create directory to store query results if not present. It assumes that + * configuration and directory are valid. 
+ */ + void mkdir() { + if (this.saveResults) { + final File file = new File(this.outputQueryResultDirectory); + if (!file.exists()) { + file.mkdirs(); + } + } } - protected void setOutputQueryResultDirectory(String outputQueryResultDirectory) { - this.outputQueryResultDirectory = outputQueryResultDirectory; + void printConfiguration() { + System.out.println(" --save-query-results: " + this.saveResults); + System.out.println(" --output-query-result-directory: " + this.outputQueryResultDirectory); } } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index 75973312f..e2c0f790c 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -53,7 +53,7 @@ public class VLog4jClientMaterialize implements Runnable { private final List queries = new ArrayList<>(); @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar} syntax", required = true) - private List ruleFiles = new ArrayList<>(); + private final List ruleFiles = new ArrayList<>(); // TODO // Support graal rule files @@ -61,25 +61,25 @@ public class VLog4jClientMaterialize implements Runnable { // private List graalRuleFiles = new ArrayList<>(); @Option(names = "--log-level", description = "Log level of VLog (c++ library). One of: DEBUG, INFO, WARNING (default), ERROR.", required = false) - private LogLevel logLevel = LogLevel.WARNING; + private final LogLevel logLevel = LogLevel.WARNING; @Option(names = "--log-file", description = "Log file of VLog (c++ library). VLog will log to the default system output by default", required = false) private String logFile; @Option(names = "--chase-algorithm", description = "Chase algorithm. 
RESTRICTED_CHASE (default) or SKOLEM_CHASE.", required = false) - private Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; + private final Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; @Option(names = "--timeout", description = "Timeout in seconds. Infinite by default", required = false) - private int timeout = 0; + private final int timeout = 0; @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. Vlog4jClient will print the size of its extension", required = true) - private List queryStrings = new ArrayList<>(); + private final List queryStrings = new ArrayList<>(); @ArgGroup(exclusive = false) - private PrintQueryResults printQueryResults = new PrintQueryResults(); + private final PrintQueryResults printQueryResults = new PrintQueryResults(); @ArgGroup(exclusive = false) - private SaveQueryResults saveQueryResults = new SaveQueryResults(); + private final SaveQueryResults saveQueryResults = new SaveQueryResults(); // TODO // @ArgGroup(exclusive = false) @@ -101,7 +101,7 @@ public void run() { /* Print configuration */ this.printConfiguration(); - try (Reasoner reasoner = new VLogReasoner(kb)) { + try (Reasoner reasoner = new VLogReasoner(this.kb)) { this.materialize(reasoner); // TODO if (saveModel.saveModel) { this.saveModel(); } @@ -112,14 +112,14 @@ public void run() { } private void validateConfiguration() { - if (!printQueryResults.isValid()) { - printMessageAndExit(PrintQueryResults.configurationErrorMessage); + if (!this.printQueryResults.isValid()) { + this.printErrorMessageAndExit(PrintQueryResults.configurationErrorMessage); } - if (!saveQueryResults.isConfigurationValid()) { - printMessageAndExit(SaveQueryResults.configurationErrorMessage); + if (!this.saveQueryResults.isConfigurationValid()) { + this.printErrorMessageAndExit(SaveQueryResults.configurationErrorMessage); } - if (saveQueryResults.isSaveResults() && !saveQueryResults.isDirectoryValid()) { - 
printMessageAndExit(SaveQueryResults.wrongDirectoryErrorMessage); + if (this.saveQueryResults.isSaveResults() && !this.saveQueryResults.isDirectoryValid()) { + this.printErrorMessageAndExit(SaveQueryResults.wrongDirectoryErrorMessage); } // TODO // if (!saveModel.isConfigurationValid()) { @@ -131,25 +131,23 @@ private void validateConfiguration() { } private void configureRules() { - for (String ruleFile : ruleFiles) { + for (final String ruleFile : this.ruleFiles) { try { - RuleParser.parseInto(kb, new FileInputStream(ruleFile)); - } catch (FileNotFoundException e1) { - throw new RuntimeException( - "File not found: " + ruleFile + ". " + e1.getMessage() + "\nExiting the program."); - } catch (ParsingException e2) { - throw new RuntimeException( - "Failed to parse rule file: " + ruleFile + ". " + e2.getMessage() + "\nExiting the program."); + RuleParser.parseInto(this.kb, new FileInputStream(ruleFile)); + } catch (final FileNotFoundException e1) { + this.printErrorMessageAndExit("File not found: " + ruleFile + "\n " + e1.getMessage()); + } catch (final ParsingException e2) { + this.printErrorMessageAndExit("Failed to parse rule file: " + ruleFile + "\n " + e2.getMessage()); } } } private void configureQueries() { - for (String queryString : queryStrings) { + for (final String queryString : this.queryStrings) { try { final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); - queries.add(query); - } catch (ParsingException e) { + this.queries.add(query); + } catch (final ParsingException e) { System.err.println("Failed to parse query: \"\"\"" + queryString + "\"\"\"."); System.err.println(e.getMessage()); System.err.println("The query was skipped. 
Continuing ..."); @@ -157,43 +155,43 @@ private void configureQueries() { } } - private void materialize(Reasoner reasoner) { + private void materialize(final Reasoner reasoner) { // logFile - reasoner.setLogFile(logFile); + reasoner.setLogFile(this.logFile); // logLevel - reasoner.setLogLevel(logLevel); + reasoner.setLogLevel(this.logLevel); // chaseAlgorithm - reasoner.setAlgorithm(chaseAlgorithm); + reasoner.setAlgorithm(this.chaseAlgorithm); // timeout - if (timeout > 0) { - reasoner.setReasoningTimeout(timeout); + if (this.timeout > 0) { + reasoner.setReasoningTimeout(this.timeout); } System.out.println("Executing the chase ..."); try { reasoner.reason(); - } catch (IOException e) { - throw new RuntimeException( - "Something went wrong. Please check the log file." + e.getMessage() + "\nExiting the program."); + } catch (final IOException e) { + this.printErrorMessageAndExit( + "Something went wrong during reasoning. Please check the reasoner log file.\n" + e.getMessage()); } } // TODO private void saveModel() {...} - private void answerQueries(Reasoner reasoner) { - if (!queries.isEmpty()) { + private void answerQueries(final Reasoner reasoner) { + if (!this.queries.isEmpty()) { System.out.println("Answering queries ..."); - for (PositiveLiteral query : queries) { - if (saveQueryResults.isSaveResults()) { + for (final PositiveLiteral query : this.queries) { + if (this.saveQueryResults.isSaveResults()) { // Save the query results - doSaveQueryResults(reasoner, query); + this.doSaveQueryResults(reasoner, query); } - if (printQueryResults.isSizeOnly()) { + if (this.printQueryResults.isSizeOnly()) { // print number of facts in results - doPrintResults(reasoner, query); - } else if (printQueryResults.isComplete()) { + this.doPrintResults(reasoner, query); + } else if (this.printQueryResults.isComplete()) { // print facts ExamplesUtils.printOutQueryAnswers(query, reasoner); } @@ -204,46 +202,47 @@ private void answerQueries(Reasoner reasoner) { private void 
printConfiguration() { System.out.println("Configuration:"); - for (String ruleFile : ruleFiles) { + for (final String ruleFile : this.ruleFiles) { System.out.println(" --rule-file: " + ruleFile); } - for (PositiveLiteral query : queries) { + for (final PositiveLiteral query : this.queries) { System.out.println(" --query: " + query); } - System.out.println(" --log-file: " + logFile); - System.out.println(" --log-level: " + logLevel); - System.out.println(" --chase-algorithm: " + chaseAlgorithm); - System.out.println(" --timeout: " + ((timeout > 0) ? timeout : "none")); + System.out.println(" --log-file: " + this.logFile); + System.out.println(" --log-level: " + this.logLevel); + System.out.println(" --chase-algorithm: " + this.chaseAlgorithm); + System.out.println(" --timeout: " + ((this.timeout > 0) ? this.timeout : "none")); /* Print what to do with the result */ - printQueryResults.printConfiguration(); - saveQueryResults.printConfiguration(); + this.printQueryResults.printConfiguration(); + this.saveQueryResults.printConfiguration(); // TODO saveModel.printConfiguration(); } - private void doSaveQueryResults(Reasoner reasoner, PositiveLiteral query) { - saveQueryResults.mkdir(); + private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral query) { + this.saveQueryResults.mkdir(); try { - reasoner.exportQueryAnswersToCsv(query, queryOputputPath(query), true); - } catch (IOException e) { + reasoner.exportQueryAnswersToCsv(query, this.queryOputputPath(query), true); + } catch (final IOException e) { System.err.println("Can't save query: \"\"\"" + query + "\"\"\"."); System.err.println(e.getMessage()); } } - private void doPrintResults(Reasoner reasoner, PositiveLiteral query) { + private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { System.out.println( "Number of query answers in " + query + ": " + ExamplesUtils.getQueryAnswerCount(query, reasoner)); } - private String queryOputputPath(PositiveLiteral query) { - 
return saveQueryResults.getOutputQueryResultDirectory() + "/" + query + ".csv"; + private String queryOputputPath(final PositiveLiteral query) { + return this.saveQueryResults.getOutputQueryResultDirectory() + "/" + query + ".csv"; } - private void printMessageAndExit(String message) { + private void printErrorMessageAndExit(final String message) { System.err.println(message); + System.out.println("Exiting the program."); System.exit(1); } From 85b669255a098e4872e212cfd23f31fb5da81c9d Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 25 Nov 2019 16:50:24 +0100 Subject: [PATCH 0627/1255] extract default directory names into constants --- .../org/vlog4j/client/picocli/SaveModel.java | 7 +- .../client/picocli/SaveQueryResults.java | 6 +- .../client/picocli/PrintQueryResultsTest.java | 104 +++++++----------- .../vlog4j/client/picocli/SaveModelTest.java | 99 +++++++++-------- .../client/picocli/SaveQueryResultsTest.java | 67 ++--------- 5 files changed, 115 insertions(+), 168 deletions(-) diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java index 6cb05f4b7..fb03cf117 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java @@ -32,6 +32,8 @@ */ public class SaveModel { + public static final String DEFAULT_OUTPUT_DIR_NAME = "model"; + static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required."; static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path."; @@ -48,8 +50,9 @@ public class SaveModel { * * @default "model" */ - @Option(names = "--output-model-directory", description = "Directory to store the model. Used only if --store-model is true. 
\"model\" by default.") - private String outputModelDirectory = "model"; + @Option(names = "--output-model-directory", description = "Directory to store the model. Used only if --store-model is true. \"" + + DEFAULT_OUTPUT_DIR_NAME + "\" by default.") + private String outputModelDirectory = DEFAULT_OUTPUT_DIR_NAME; public SaveModel() { } diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java index 735b63a4b..441aa359f 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java @@ -31,6 +31,7 @@ * */ public class SaveQueryResults { + public static final String DEFAULT_OUTPUT_DIR_NAME = "query-results"; static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required."; static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path."; @@ -50,8 +51,9 @@ public class SaveQueryResults { * * @default query-results */ - @Option(names = "--output-query-result-directory", description = "Directory to store the model. Used only if --save-query-results is true. \"query-results\" by default.") - private String outputQueryResultDirectory = "query-results"; + @Option(names = "--output-query-result-directory", description = "Directory to store the model. Used only if --save-query-results is true. 
\"" + + DEFAULT_OUTPUT_DIR_NAME + "\" by default.") + private String outputQueryResultDirectory = DEFAULT_OUTPUT_DIR_NAME; public SaveQueryResults() { } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java index 686fe7506..13469c358 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -1,43 +1,22 @@ package org.vlog4j.client.picocli; -/*- - * #%L - * VLog4j Client - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.io.ByteArrayOutputStream; -import java.io.PrintStream; - import org.junit.Test; -import org.vlog4j.client.picocli.PrintQueryResults; public class PrintQueryResultsTest { - String outputConfigurationBase = " --print-query-result-size: %b\n --print-complete-query-result: %b\n"; - private final PrintQueryResults sizeTrueCompleteTrue = new PrintQueryResults(true, true); - private final PrintQueryResults sizeTrueCompleteFalse = new PrintQueryResults(true, false); - private final PrintQueryResults sizeFalseCompleteTrue = new PrintQueryResults(false, true); - private final PrintQueryResults sizeFalseCompleteFalse = new PrintQueryResults(false, false); + private static final PrintQueryResults sizeTrueCompleteTrue = new PrintQueryResults(); + private static final PrintQueryResults sizeTrueCompleteFalse = new PrintQueryResults(); + private static final PrintQueryResults sizeFalseCompleteTrue = new PrintQueryResults(false, true); + private static final PrintQueryResults sizeFalseCompleteFalse = new PrintQueryResults(); + static { + sizeTrueCompleteTrue.setComplete(true); + sizeFalseCompleteFalse.setSizeOnly(false); + } + @Test public void isValid_sizeTrueCompleteFalse_valid() { // default configuration @@ -45,13 +24,13 @@ public void isValid_sizeTrueCompleteFalse_valid() { } @Test - public void isValid_sizeFalseCompleteTrue_valid() { - assertTrue(sizeFalseCompleteTrue.isValid()); + public void isValid_sizeTrueCompleteTrue_notValid() { + assertFalse(sizeTrueCompleteTrue.isValid()); } @Test - public void isValid_sizeTrueCompleteTrue_notValid() { - assertFalse(sizeTrueCompleteTrue.isValid()); + public void isValid_sizeFalseCompleteTrue_valid() { + assertTrue(sizeFalseCompleteTrue.isValid()); } @Test @@ -60,27 +39,43 @@ public void isValid_sizeFalseCompleteFalse_valid() { } @Test - public void 
printConfiguration_sizeTrueCompleteFalse() { - assertEquals(String.format(outputConfigurationBase, true, false), - captureOutputPrintConfiguration(sizeTrueCompleteFalse)); + public void isSizeOnly_sizeFalseCompleteTrue() { + assertFalse(sizeFalseCompleteTrue.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeTrueCompleteTrue() { + assertTrue(sizeTrueCompleteTrue.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeTrueCompleteFalse() { + assertTrue(sizeTrueCompleteFalse.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeFalseCompleteFalse() { + assertFalse(sizeFalseCompleteFalse.isSizeOnly()); + } + + @Test + public void isComplete_sizeTrueCompleteFalse() { + assertFalse(sizeTrueCompleteFalse.isComplete()); } @Test - public void printConfiguration_sizeFalseCompleteTrue() { - assertEquals(String.format(outputConfigurationBase, false, true), - captureOutputPrintConfiguration(sizeFalseCompleteTrue)); + public void isComplete_sizeTrueCompleteTrue() { + assertTrue(sizeTrueCompleteTrue.isComplete()); } @Test - public void printConfiguration_sizeTrueCompleteTrue() { - assertEquals(String.format(outputConfigurationBase, true, true), - captureOutputPrintConfiguration(sizeTrueCompleteTrue)); + public void isComplete_sizeFalseCompleteTrue() { + assertTrue(sizeFalseCompleteTrue.isComplete()); } @Test - public void printConfiguration_sizeFalseCompleteFalse() { - assertEquals(String.format(outputConfigurationBase, false, false), - captureOutputPrintConfiguration(sizeFalseCompleteFalse)); + public void isComplete_sizeFalseCompleteFalse() { + assertFalse(sizeFalseCompleteFalse.isComplete()); } @Test @@ -101,21 +96,4 @@ public void setComplete_and_isComplete() { assertTrue(prq.isComplete()); } - private String captureOutputPrintConfiguration(PrintQueryResults prq) { - // Output Variables - ByteArrayOutputStream result = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(result); - // Save default System.out - PrintStream systemOut = System.out; - // 
Change System.out - System.setOut(ps); - // Do something - prq.printConfiguration(); - // Restore previous state - System.out.flush(); - System.setOut(systemOut); - // return result - return result.toString(); - } - } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java index 53d80e267..5be484df7 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java @@ -5,11 +5,10 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; -import java.io.PrintStream; +import org.apache.commons.lang3.StringUtils; import org.junit.Rule; /*- @@ -34,18 +33,20 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; -import org.vlog4j.client.picocli.SaveModel; public class SaveModelTest { - private final String outputConfigurationBase = " --save-model: %b\n --output-model-directory: %s\n"; - private final String defaultDir = "model"; - private final SaveModel saveTrueDefaultDir = new SaveModel(true, defaultDir); - private final SaveModel saveTrueEmptyDir = new SaveModel(true, ""); - private final SaveModel saveTrueNullDir = new SaveModel(true, null); - private final SaveModel saveFalseDefaultDir = new SaveModel(false, defaultDir); - private final SaveModel saveFalseEmptyDir = new SaveModel(false, ""); - private final SaveModel saveFalseNullDir = new SaveModel(false, null); + private final static SaveModel saveTrueDefaultDir = new SaveModel(); + private final static SaveModel saveTrueEmptyDir = new SaveModel(true, ""); + private final static SaveModel saveTrueNullDir = new SaveModel(true, null); + private final static SaveModel saveFalseDefaultDir = new SaveModel(); + private final static SaveModel saveFalseEmptyDir = new SaveModel(false, ""); + 
private final static SaveModel saveFalseNullDir = new SaveModel(false, null); + + static { + saveTrueDefaultDir.setSaveModel(true); + saveFalseDefaultDir.setSaveModel(false); + } @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @@ -132,39 +133,63 @@ public void mkdir_saveFalse() throws IOException { } @Test - public void printConfiguration_saveTrueDefaultDir() { - assertEquals(String.format(outputConfigurationBase, true, defaultDir), - captureOutputPrintConfiguration(saveTrueDefaultDir)); + public void isSaveModel_saveTrueDefaultDir() { + assertTrue(saveTrueDefaultDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveTrueDefaultDir() { + assertEquals(SaveModel.DEFAULT_OUTPUT_DIR_NAME, saveTrueDefaultDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveTrueEmptyDir() { + assertTrue(saveTrueEmptyDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveTrueEmptyDir() { + assertEquals(StringUtils.EMPTY, saveTrueEmptyDir.getOutputModelDirectory()); } @Test - public void printConfiguration_saveTrueEmptyDir() { - assertEquals(String.format(outputConfigurationBase, true, ""), - captureOutputPrintConfiguration(saveTrueEmptyDir)); + public void isSaveModel_saveTrueNullDir() { + assertTrue(saveTrueNullDir.isSaveModel()); } @Test - public void printConfiguration_saveTrueNullDir() { - assertEquals(String.format(outputConfigurationBase, true, null), - captureOutputPrintConfiguration(saveTrueNullDir)); + public void getOutputModelDirectory_saveTrueNullDir() { + assertNull(saveTrueNullDir.getOutputModelDirectory()); } @Test - public void printConfiguration_saveFalseDefaultDir() { - assertEquals(String.format(outputConfigurationBase, false, defaultDir), - captureOutputPrintConfiguration(saveFalseDefaultDir)); + public void isSaveModel_saveFalseDefaultDir() { + assertFalse(saveFalseDefaultDir.isSaveModel()); } @Test - public void printConfiguration_saveFalseEmptyDir() { - 
assertEquals(String.format(outputConfigurationBase, false, ""), - captureOutputPrintConfiguration(saveFalseEmptyDir)); + public void getOutputModelDirectory_saveFalseDefaultDir() { + assertEquals(SaveModel.DEFAULT_OUTPUT_DIR_NAME, saveFalseDefaultDir.getOutputModelDirectory()); } @Test - public void printConfiguration_saveFalseNullDir() { - assertEquals(String.format(outputConfigurationBase, false, null), - captureOutputPrintConfiguration(saveFalseNullDir)); + public void isSaveModel_saveFalseEmptyDir() { + assertFalse(saveFalseEmptyDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveFalseEmptyDir() { + assertEquals(StringUtils.EMPTY, saveFalseEmptyDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveFalseNullDir() { + assertFalse(saveFalseNullDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveFalseNullDir() { + assertNull(saveFalseNullDir.getOutputModelDirectory()); } @Test @@ -185,20 +210,4 @@ public void setOutputModelDirectory_and_getOutputModelDirectory() { assertNull(sm.getOutputModelDirectory()); } - private String captureOutputPrintConfiguration(SaveModel sm) { - // Output Variables - ByteArrayOutputStream result = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(result); - // Save default System.out - PrintStream systemOut = System.out; - // Change System.out - System.setOut(ps); - // Do something - sm.printConfiguration(); - // Restore previous state - System.out.flush(); - System.setOut(systemOut); - // return result - return result.toString(); - } } diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java index 65988e564..b7f684f61 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java @@ -5,10 +5,8 @@ import static 
org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; -import java.io.PrintStream; import org.junit.Rule; @@ -36,15 +34,14 @@ import org.junit.rules.TemporaryFolder; public class SaveQueryResultsTest { - private final String outputConfigurationBase = " --save-query-results: %b\n --output-query-result-directory: %s\n"; - private final String defaultDir = "query-results"; - private final SaveQueryResults saveTrueDefaultDir = new SaveQueryResults(true, defaultDir); - private final SaveQueryResults saveTrueEmptyDir = new SaveQueryResults(true, ""); - private final SaveQueryResults saveTrueNullDir = new SaveQueryResults(true, null); - private final SaveQueryResults saveFalseDefaultDir = new SaveQueryResults(false, defaultDir); - private final SaveQueryResults saveFalseEmptyDir = new SaveQueryResults(false, ""); - private final SaveQueryResults saveFalseNullDir = new SaveQueryResults(false, null); + private static final SaveQueryResults saveTrueDefaultDir = new SaveQueryResults(true, + SaveQueryResults.DEFAULT_OUTPUT_DIR_NAME); + private static final SaveQueryResults saveTrueEmptyDir = new SaveQueryResults(true, ""); + private static final SaveQueryResults saveTrueNullDir = new SaveQueryResults(true, null); + private static final SaveQueryResults saveFalseDefaultDir = new SaveQueryResults(); + private static final SaveQueryResults saveFalseEmptyDir = new SaveQueryResults(false, ""); + private static final SaveQueryResults saveFalseNullDir = new SaveQueryResults(false, null); @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @@ -131,39 +128,13 @@ public void mkdir_saveFalse() throws IOException { } @Test - public void printConfiguration_saveTrueDefaultDir() { - assertEquals(String.format(outputConfigurationBase, true, defaultDir), - captureOutputPrintConfiguration(saveTrueDefaultDir)); + public void isSaveResultsl_saveFalseDefaultDir() { + 
assertFalse(saveFalseDefaultDir.isSaveResults()); } @Test - public void printConfiguration_saveTrueEmptyDir() { - assertEquals(String.format(outputConfigurationBase, true, ""), - captureOutputPrintConfiguration(saveTrueEmptyDir)); - } - - @Test - public void printConfiguration_saveTrueNullDir() { - assertEquals(String.format(outputConfigurationBase, true, null), - captureOutputPrintConfiguration(saveTrueNullDir)); - } - - @Test - public void printConfiguration_saveFalseDefaultDir() { - assertEquals(String.format(outputConfigurationBase, false, defaultDir), - captureOutputPrintConfiguration(saveFalseDefaultDir)); - } - - @Test - public void printConfiguration_saveFalseEmptyDir() { - assertEquals(String.format(outputConfigurationBase, false, ""), - captureOutputPrintConfiguration(saveFalseEmptyDir)); - } - - @Test - public void printConfiguration_saveFalseNullDir() { - assertEquals(String.format(outputConfigurationBase, false, null), - captureOutputPrintConfiguration(saveFalseNullDir)); + public void getOutputQueryResultDirectory_saveFalseDefaultDir() { + assertEquals(SaveQueryResults.DEFAULT_OUTPUT_DIR_NAME, saveFalseDefaultDir.getOutputQueryResultDirectory()); } @Test @@ -184,20 +155,4 @@ public void setOutputQueryResultDirectory_and_getOutputQueryResultsDirectory() { assertNull(srq.getOutputQueryResultDirectory()); } - private String captureOutputPrintConfiguration(SaveQueryResults srq) { - // Output Variables - ByteArrayOutputStream result = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(result); - // Save default System.out - PrintStream systemOut = System.out; - // Change System.out - System.setOut(ps); - // Do something - srq.printConfiguration(); - // Restore previous state - System.out.flush(); - System.setOut(systemOut); - // return result - return result.toString(); - } } From f658ca589959de7ba8f31aa3ada2a8916ddfc3ad Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 26 Nov 2019 20:36:06 +0100 Subject: [PATCH 0628/1255] added some fixes for 
abstractConstants --- .../core/model/implementation/Serializer.java | 31 +++++--- .../implementation/InMemoryDataSource.java | 6 +- .../vlog4j/syntax/parser/EntityTest.java | 75 +++++++++++-------- .../vlog4j/syntax/parser/RuleParserTest.java | 20 ----- 4 files changed, 66 insertions(+), 66 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 92cfe77ff..13e187692 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -55,10 +55,16 @@ public final class Serializer { public static final String AT = "@"; public static final String SOURCE = "@source "; public static final String COLON = ": "; + public static final String COLON_UNSPACED = ":"; public static final String CARET = "^"; public static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; - public static final String ESCAPED_QUOTE = "\""; + public static final String QUOTE = "\""; + public static final String DOUBLE = "[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?"; + public static final String INTEGER = "^[-+]?\\d+$"; + public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String TRUE = "true"; + public static final String FALSE = "false"; /** * Constructor. @@ -103,7 +109,8 @@ public static String getString(Literal literal) { } else { stringBuilder.append(COMMA); } - stringBuilder.append(term.getSyntacticRepresentation()); + String string = term.getSyntacticRepresentation(); + stringBuilder.append(string); } stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); @@ -128,11 +135,13 @@ public static String getFactString(Fact fact) { * @return String representation corresponding to a given {@link Constant}. 
*/ public static String getString(Constant constant) { - return constant.getName(); - } - - public static String getString(DatatypeConstant constant) { - return getShortConstantName(constant); + if (constant.getName().contains(COLON_UNSPACED) || constant.getName().matches(INTEGER) + || constant.getName().matches(DOUBLE) || constant.getName().matches(DECIMAL) + || constant.getName().equals(TRUE) || constant.getName().equals(FALSE)) { + return LESS_THAN + constant.getName() + MORE_THAN; + } else { + return constant.getName(); + } } /** @@ -226,7 +235,7 @@ public static String getString(Conjunction conjunction) { * {@link LanguageStringConstant}. */ public static String getConstantName(LanguageStringConstant languageStringConstant) { - return ESCAPED_QUOTE + escape(languageStringConstant.getString()) + ESCAPED_QUOTE + AT + return QUOTE + escape(languageStringConstant.getString()) + QUOTE + AT + languageStringConstant.getLanguageTag(); } @@ -239,9 +248,9 @@ public static String getConstantName(LanguageStringConstant languageStringConsta * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getShortConstantName(DatatypeConstant datatypeConstant) { + public static String getString(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return ESCAPED_QUOTE + datatypeConstant.getLexicalValue() + ESCAPED_QUOTE; + return QUOTE + datatypeConstant.getLexicalValue() + QUOTE; } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -264,7 +273,7 @@ public static String getShortConstantName(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. 
*/ public static String getConstantName(DatatypeConstant datatypeConstant) { - return ESCAPED_QUOTE + escape(datatypeConstant.getLexicalValue()) + ESCAPED_QUOTE + CARET + CARET + LESS_THAN + return QUOTE + escape(datatypeConstant.getLexicalValue()) + QUOTE + CARET + CARET + LESS_THAN + datatypeConstant.getDatatype() + MORE_THAN; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 8c005a782..3bd708525 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -94,10 +94,8 @@ public String[][] getData() { @Override public String getSyntacticRepresentation() { - - String message = "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"; - StringBuilder facts = new StringBuilder(""); - facts.append(message); + StringBuilder facts = new StringBuilder( + "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < this.getData().length; i++) { for (int j = 0; j < data[i].length; j++) { facts.append(data[i][j] + " "); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index b0460357f..a79b7b853 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -24,7 +24,7 @@ import java.io.File; import java.io.IOException; import java.net.URL; -import java.util.Arrays; + import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; @@ 
-54,18 +54,23 @@ public class EntityTest { public void languageStringConstantToStringRoundTripTest() throws ParsingException { LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); Predicate p = Expressions.makePredicate("p", 1); - Fact f3 = Expressions.makeFact(p, Arrays.asList(s)); + Fact f3 = Expressions.makeFact(p, s); assertEquals(f3, RuleParser.parseFact(f3.toString())); } - public void abstractConstantToStringRoundTripTest() throws ParsingException { + @Test + public void abstractConstantStringToStringRoundTripTest() throws ParsingException { AbstractConstantImpl f = new AbstractConstantImpl("f"); - AbstractConstantImpl a = new AbstractConstantImpl("1"); - Predicate p = Expressions.makePredicate("p", 1); - Fact f1 = Expressions.makeFact(p, Arrays.asList(f)); - Fact f2 = Expressions.makeFact(p, Arrays.asList(a)); + Fact f1 = Expressions.makeFact("p", f); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantAbsoluteToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("http://example.org/test"); + Fact f1 = Expressions.makeFact("p", a); + System.out.println(f1.toString()); assertEquals(f1, RuleParser.parseFact(f1.toString())); - assertEquals(f2, RuleParser.parseFact(f2.toString())); } @Test @@ -95,27 +100,34 @@ public void conjunctionToStringRoundTripTest() throws ParsingException { Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); - assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + assertEquals(bodyLiterals, RuleParser.parseRule(rule1.toString()).getBody()); + assertEquals(headPositiveLiterals, RuleParser.parseRule(rule1.toString()).getHead()); + } + + @Test + public void positiveLiteralToStringRoundTripTest() throws ParsingException { + Constant c = 
Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + assertEquals(atom1, RuleParser.parseLiteral(atom1.toString())); } @Test public void literalToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); Variable x = Expressions.makeUniversalVariable("X"); - Variable z = Expressions.makeExistentialVariable("Z"); NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); - PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); - Rule rule1 = Expressions.makeRule(headAtom1, atom1); - assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + assertEquals(atom1, RuleParser.parseLiteral(atom1.toString())); } @Test public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingException { String shortDoubleConstant = "12.345E67"; assertEquals(shortDoubleConstant, - RuleParser.parseFact("p(\"12.345E67\"^^).").getArguments() - .get(0).toString()); - assertEquals(shortDoubleConstant, RuleParser.parseFact("p(12.345E67).").getArguments().get(0).toString()); + RuleParser.parseFact("p(\"" + shortDoubleConstant + "\"^^).") + .getArguments().get(0).toString()); + assertEquals(shortDoubleConstant, + RuleParser.parseFact("p(" + shortDoubleConstant + ").").getArguments().get(0).toString()); } @Test @@ -128,26 +140,31 @@ public void datatypeFloatConstantToStringRoundTripTest() throws ParsingException @Test public void datatypeStringConstantToStringRoundTripTest() throws ParsingException { String shortStringConstant = "\"data\""; - assertEquals(shortStringConstant, RuleParser - .parseFact("p(\"data\"^^).").getArguments().get(0).toString()); - assertEquals(shortStringConstant, RuleParser.parseFact("p(\"data\").").getArguments().get(0).toString()); + assertEquals(shortStringConstant, + RuleParser.parseFact("p(" + shortStringConstant + "^^).") + .getArguments().get(0).toString()); + 
assertEquals(shortStringConstant, + RuleParser.parseFact("p(" + shortStringConstant + ").").getArguments().get(0).toString()); } @Test public void datatypeIntegerConstantToStringRoundTripTest() throws ParsingException { String shortIntegerConstant = "1"; - assertEquals(shortIntegerConstant, RuleParser.parseFact("p(\"1\"^^).") - .getArguments().get(0).toString()); - assertEquals(shortIntegerConstant, RuleParser.parseFact("p(1).").getArguments().get(0).toString()); + assertEquals(shortIntegerConstant, + RuleParser.parseFact("p(\"" + shortIntegerConstant + "\"^^).") + .getArguments().get(0).toString()); + assertEquals(shortIntegerConstant, + RuleParser.parseFact("p(" + shortIntegerConstant + ").").getArguments().get(0).toString()); } @Test public void datatypeDecimalToStringRoundTripTest() throws ParsingException { String shortDecimalConstant = "0.23"; assertEquals(shortDecimalConstant, - RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0) - .toString()); - assertEquals(shortDecimalConstant, RuleParser.parseFact("p(0.23).").getArguments().get(0).toString()); + RuleParser.parseFact("p(\"" + shortDecimalConstant + "\"^^).") + .getArguments().get(0).toString()); + assertEquals(shortDecimalConstant, + RuleParser.parseFact("p(" + shortDecimalConstant + ").").getArguments().get(0).toString()); } @Test @@ -164,10 +181,8 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep @Test public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - String INPUT_FOLDER = "src/test/data/input/"; - File unzippedRdfFile = new File(INPUT_FOLDER + "file.nt"); Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); DataSourceDeclaration dataSourceDeclaration = new 
DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -177,10 +192,8 @@ public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingExceptio @Test public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - String INPUT_FOLDER = "src/test/data/input/"; - String csvFile = INPUT_FOLDER + "file.csv"; Predicate predicate1 = Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File("src/test/data/input/file.csv")); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index c5d4b7ecb..4e8a4fd3f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -412,24 +412,4 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } - @Test - public void DatatypeConstantgRoundTripTest() throws ParsingException { - DatatypeConstantImpl datatypeConstantString = new DatatypeConstantImpl("data", PrefixDeclarations.XSD_STRING); - DatatypeConstantImpl datatypeConstantInteger = new DatatypeConstantImpl("1", PrefixDeclarations.XSD_INTEGER); - DatatypeConstantImpl datatypeConstantFloat = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_FLOAT); - DatatypeConstantImpl datatypeConstantDouble = new DatatypeConstantImpl("0.5", PrefixDeclarations.XSD_DOUBLE); - DatatypeConstantImpl datatypeConstantDecimal = 
new DatatypeConstantImpl("0.23", PrefixDeclarations.XSD_DECIMAL); - assertEquals(datatypeConstantString, - RuleParser.parseFact("p(\"data\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantInteger, - RuleParser.parseFact("p(\"1\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantFloat, - RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantDouble, - RuleParser.parseFact("p(\"0.5\"^^).").getArguments().get(0)); - assertEquals(datatypeConstantDecimal, - RuleParser.parseFact("p(\"0.23\"^^).").getArguments().get(0)); - - } - } From 195fd7ce863c3d53205b2238c7df41a2d831724a Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 27 Nov 2019 12:37:16 +0100 Subject: [PATCH 0629/1255] added roundtripping for absolute abstractConstants --- .../vlog4j/syntax/parser/EntityTest.java | 29 ++++++++++++++++++- .../vlog4j/syntax/parser/RuleParserTest.java | 4 +-- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index a79b7b853..9bb9e68ef 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -69,7 +69,34 @@ public void abstractConstantStringToStringRoundTripTest() throws ParsingExceptio public void abstractConstantAbsoluteToStringRoundTripTest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("http://example.org/test"); Fact f1 = Expressions.makeFact("p", a); - System.out.println(f1.toString()); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeDoubleToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("4.2E9"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, 
RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeIntegerToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("11"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeBooleanToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("false"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeDecimalToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("-5.0"); + Fact f1 = Expressions.makeFact("p", b); assertEquals(f1, RuleParser.parseFact(f1.toString())); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 4e8a4fd3f..8b0693a54 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,8 +19,7 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import java.io.File; import java.io.IOException; @@ -41,7 +40,6 @@ import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; From b4801bd03718342541d9e9abf27942800230da4b Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 29 Nov 2019 18:15:41 +0100 Subject: [PATCH 0630/1255] added predicate relative/absolute support --- .../model/api/LanguageStringConstant.java | 2 +- .../model/implementation/Expressions.java | 12 ++++ .../core/model/implementation/Serializer.java | 26 +++++--- .../vlog4j/syntax/parser/EntityTest.java | 63 +++++++++++++++++++ 4 files changed, 92 insertions(+), 11 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index b3694d565..2c150b507 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -65,6 +65,6 @@ default String getDatatype() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return Serializer.getConstantName(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index eedc94f8c..625b0c4c6 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -46,7 +46,19 @@ * @author Markus Krötzsch * */ + public final class Expressions { + + public static final String COLON_UNSPACED = ":"; + public static final String LESS_THAN = "<"; + public static final String MORE_THAN = ">"; + public static final String QUOTE = "\""; + public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String INTEGER = "^[-+]?\\d+$"; + public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String TRUE = "true"; + public static final String FALSE = "false"; + /** * Private constructor prevents this utilities class to be instantiated. */ diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 13e187692..59485b205 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.implementation; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; + /*- * #%L * VLog4j Core Components @@ -60,7 +62,7 @@ public final class Serializer { public static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; public static final String QUOTE = "\""; - public static final String DOUBLE = "[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?"; + public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; public static final String INTEGER = "^[-+]?\\d+$"; public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; public static final String TRUE = "true"; @@ -77,6 +79,16 @@ private static String escape(String string) { return 
string.replace("\\", "\\\\").replace("\"", "\\\""); } + private static String checkRelativeAbsoluteIri(String string) { + if ((string.contains(COLON_UNSPACED) || string.matches(INTEGER) || string.matches(DOUBLE) + || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE)) + && (!string.contains(LESS_THAN))) { + return LESS_THAN + string + MORE_THAN; + } else { + return string; + } + } + /** * Creates a String representation of a given {@link Rule}. * @@ -101,7 +113,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(literal.getPredicate().getName()).append(OPEN_PARENTHESIS); + stringBuilder.append(checkRelativeAbsoluteIri(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -134,14 +146,8 @@ public static String getFactString(Fact fact) { * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. 
*/ - public static String getString(Constant constant) { - if (constant.getName().contains(COLON_UNSPACED) || constant.getName().matches(INTEGER) - || constant.getName().matches(DOUBLE) || constant.getName().matches(DECIMAL) - || constant.getName().equals(TRUE) || constant.getName().equals(FALSE)) { - return LESS_THAN + constant.getName() + MORE_THAN; - } else { - return constant.getName(); - } + public static String getString(AbstractConstant constant) { + return checkRelativeAbsoluteIri(constant.getName()); } /** diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 9bb9e68ef..cff88cb13 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -100,6 +101,68 @@ public void abstractConstantRelativeDecimalToStringRoundTripTest() throws Parsin assertEquals(f1, RuleParser.parseFact(f1.toString())); } + @Test + public void iriRoundTripTest() throws ParsingException { + String abstractConstant = "<1.0>"; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void iriRoundTripTest2() throws ParsingException { + String abstractConstant = ""; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, 
f2.getArguments().get(0).toString()); + } + + @Test + public void iriRoundTripTest3() throws ParsingException { + String abstractConstant = ""; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void predicateIriRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = Expressions.makeFact("1.e1", a); + assertEquals(f, RuleParser.parseFact(f.toString())); + } + + @Test + public void predicateIriRoundTripTest2() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f2 = Expressions.makeFact("1.e1", a); + assertEquals(f, f2); + } + + @Test + public void predicateIriRoundTripTest3() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("(a)."); + Fact f2 = Expressions.makeFact("a:b", a); + assertEquals(f, f2); + } + + @Test + public void predicateRoundTripTest3() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f2 = Expressions.makeFact("a:1", a); + assertEquals(f2, RuleParser.parseFact(f2.toString())); + } + + @Test + public void iriAngularBracketsTest() throws ParsingException { + String constant = "a"; + Fact fact = RuleParser.parseFact("p(" + constant + ")"); + Term abstractConst = fact.getArguments().get(0); + assertEquals(constant, abstractConst.toString()); + Fact fact2 = RuleParser.parseFact("p(<" + constant + ">)"); + Term abstractConst2 = fact2.getArguments().get(0); + assertEquals(abstractConst, abstractConst2); + } + @Test public void ruleToStringRoundTripTest() throws ParsingException { Constant c = Expressions.makeAbstractConstant("c"); From e2ec110a78b874733290d722c510657dd0b18921 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 2 Dec 2019 12:07:29 +0100 Subject: [PATCH 0631/1255] added license 
header --- .../client/picocli/PrintQueryResultsTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java index 13469c358..b949a5ff3 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -1,5 +1,25 @@ package org.vlog4j.client.picocli; +/*- + * #%L + * VLog4j Client + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; From 9030082f4b27123df4a3c26fa6b136c760a5c1c0 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 2 Dec 2019 12:19:12 +0100 Subject: [PATCH 0632/1255] eliminate client dependency to examples, duplicate code in ClientUtils --- vlog4j-client/pom.xml | 8 +- .../vlog4j/client/picocli/ClientUtils.java | 122 ++++++++++++++++++ .../picocli/VLog4jClientMaterialize.java | 7 +- 3 files changed, 129 insertions(+), 8 deletions(-) create mode 100644 vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java diff --git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index 267b14896..fbee05c25 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -31,10 +31,10 @@ vlog4j-parser ${project.version} - - ${project.groupId} - vlog4j-examples - ${project.version} + + org.slf4j + slf4j-log4j12 + ${slf4j.version} info.picocli diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java new file mode 100644 index 000000000..5893f86c2 --- /dev/null +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java @@ -0,0 +1,122 @@ +package org.vlog4j.client.picocli; + +/*- + * #%L + * VLog4j Client + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.Iterator; + +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; + +/** + * Utility class for interacting with the vlog4j client. + * + * @author dragoste + * + */ +public final class ClientUtils { + + /** + * Private constructor. This is a utility class. Therefore, it is best practice + * to do the following: (1) Make the class final, (2) make its constructor + * private, (3) make all its fields and methods static. This prevents the + * classes instantiation and inheritance. + */ + private ClientUtils() { + + } + + /** + * Defines how messages should be logged. This method can be modified to + * restrict the logging messages that are shown on the console or to change + * their formatting. See the documentation of Log4J for details on how to do + * this. + * + * Note: The VLog C++ backend performs its own logging. The log-level for this + * can be configured using + * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. + * It is also possible to specify a separate log file for this part of the logs. + */ + public static void configureLogging() { + // Create the appender that will write log messages to the console. + final ConsoleAppender consoleAppender = new ConsoleAppender(); + // Define the pattern of log messages. + // Insert the string "%c{1}:%L" to also show class name and line. 
+ final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + consoleAppender.setLayout(new PatternLayout(pattern)); + // Change to Level.ERROR for fewer messages: + consoleAppender.setThreshold(Level.INFO); + + consoleAppender.activateOptions(); + Logger.getRootLogger().addAppender(consoleAppender); + } + + /** + * Prints out the answers given by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + */ + public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { + System.out.println("Answers to query " + queryAtom + " :"); + try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { + answers.forEachRemaining(answer -> System.out.println(" - " + answer)); + + System.out.println("Query answers are: " + answers.getCorrectness()); + } + System.out.println(); + } + + /** + * Returns the number of answers returned by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + */ + public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { + try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { + return iteratorSize(answers); + } + } + + /** + * Returns the size of an iterator. + * + * @FIXME This is an inefficient way of counting results. 
It should be done at a + * lower level instead + * @param Iterator to iterate over + * @return number of elements in iterator + */ + private static int iteratorSize(final Iterator iterator) { + int size = 0; + for (; iterator.hasNext(); ++size) { + iterator.next(); + } + return size; + } + +} diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java index e2c0f790c..84f713662 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -32,7 +32,6 @@ import org.semanticweb.vlog4j.core.reasoner.LogLevel; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -87,7 +86,7 @@ public class VLog4jClientMaterialize implements Runnable { @Override public void run() { - ExamplesUtils.configureLogging(); + ClientUtils.configureLogging(); /* Validate configuration */ this.validateConfiguration(); @@ -193,7 +192,7 @@ private void answerQueries(final Reasoner reasoner) { this.doPrintResults(reasoner, query); } else if (this.printQueryResults.isComplete()) { // print facts - ExamplesUtils.printOutQueryAnswers(query, reasoner); + ClientUtils.printOutQueryAnswers(query, reasoner); } } } @@ -233,7 +232,7 @@ private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { System.out.println( - "Number of query answers in " + query + ": " + ExamplesUtils.getQueryAnswerCount(query, reasoner)); + "Number of query answers in " + query + ": " + ClientUtils.getQueryAnswerCount(query, reasoner)); } private String 
queryOputputPath(final PositiveLiteral query) { From bb2ee938d55c9ffdb5dc90942d1cd3d9ccd02eb1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 2 Dec 2019 13:43:19 +0100 Subject: [PATCH 0633/1255] remove addons --- .travis.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index f50b6c2fa..ac5033743 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,13 +4,13 @@ jdk: # - oraclejdk8 # - oraclejdk9 -addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-5 - - g++-5 +##addons: + ##apt: + ##sources: + #- ubuntu-toolchain-r-test +## packages: +# - gcc-5 +# - g++-5 ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar #before_install: From 5ebbd0d592ea1dbf4d35e2d14da5028a7d251637 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Mon, 2 Dec 2019 13:43:37 +0100 Subject: [PATCH 0634/1255] remove gcc configurations --- build-vlog-library.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index 599ed109f..7008b2fdd 100644 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -6,10 +6,7 @@ then echo "Using cached VLog JAR." else echo "Building new VLog JAR." 
- if [ "$(which gcc-5)x" != "x" ]; then - export CC=gcc-5 && export CXX=g++-5 - fi - mkdir -p local_builds + mkdir -p local_builds rm -rf build-vlog mkdir build-vlog cd build-vlog From dc3cea51f43d9fd277473ac785457a126cdcc3c1 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 13:57:53 +0100 Subject: [PATCH 0635/1255] added some changes --- .../model/implementation/Expressions.java | 10 ---------- .../vlog4j/syntax/parser/EntityTest.java | 19 +------------------ .../vlog4j/syntax/parser/RuleParserTest.java | 18 ++++++++++++++++++ 3 files changed, 19 insertions(+), 28 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java index 625b0c4c6..2ffbfcf28 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java @@ -49,16 +49,6 @@ public final class Expressions { - public static final String COLON_UNSPACED = ":"; - public static final String LESS_THAN = "<"; - public static final String MORE_THAN = ">"; - public static final String QUOTE = "\""; - public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String INTEGER = "^[-+]?\\d+$"; - public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String TRUE = "true"; - public static final String FALSE = "false"; - /** * Private constructor prevents this utilities class to be instantiated. 
*/ diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index cff88cb13..8bd7cb008 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -129,22 +129,6 @@ public void predicateIriRoundTripTest() throws ParsingException { assertEquals(f, RuleParser.parseFact(f.toString())); } - @Test - public void predicateIriRoundTripTest2() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); - Fact f2 = Expressions.makeFact("1.e1", a); - assertEquals(f, f2); - } - - @Test - public void predicateIriRoundTripTest3() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("(a)."); - Fact f2 = Expressions.makeFact("a:b", a); - assertEquals(f, f2); - } - @Test public void predicateRoundTripTest3() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); @@ -223,8 +207,7 @@ public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingExceptio @Test public void datatypeFloatConstantToStringRoundTripTest() throws ParsingException { String floatConstant = "\"0.5\"^^"; - assertEquals(floatConstant, RuleParser.parseFact("p(\"0.5\"^^).") - .getArguments().get(0).toString()); + assertEquals(floatConstant, RuleParser.parseFact("p(" + floatConstant + ").").getArguments().get(0).toString()); } @Test diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8b0693a54..66a5b4748 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -39,6 +40,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -410,4 +412,20 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } + @Test + public void predicateIriRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f2 = Expressions.makeFact("1.e1", a); + assertEquals(f, f2); + } + + @Test + public void predicateIriRoundTripTest2() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("(a)."); + Fact f2 = Expressions.makeFact("a:b", a); + assertEquals(f, f2); + } + } From 945e5920360cb005c579a834ee261bce14eacb39 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 2 Dec 2019 14:00:13 +0100 Subject: [PATCH 0636/1255] remove commented addons lines. 
--- .travis.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index ac5033743..24e616797 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,14 +4,6 @@ jdk: # - oraclejdk8 # - oraclejdk9 -##addons: - ##apt: - ##sources: - #- ubuntu-toolchain-r-test -## packages: -# - gcc-5 -# - g++-5 - ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar #before_install: # - sudo apt-get install gcc-5 -y From 90c21ee72d427a69f11fe438678b88f0557c2991 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 15:00:13 +0100 Subject: [PATCH 0637/1255] added some changes --- .../core/model/implementation/Serializer.java | 3 +-- .../vlog4j/syntax/parser/RuleParserTest.java | 19 ------------------- 2 files changed, 1 insertion(+), 21 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 59485b205..ad3d345b2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -81,8 +81,7 @@ private static String escape(String string) { private static String checkRelativeAbsoluteIri(String string) { if ((string.contains(COLON_UNSPACED) || string.matches(INTEGER) || string.matches(DOUBLE) - || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE)) - && (!string.contains(LESS_THAN))) { + || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE))) { return LESS_THAN + string + MORE_THAN; } else { return string; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 66a5b4748..2be0725a1 100644 --- 
a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -32,7 +32,6 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -40,7 +39,6 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -411,21 +409,4 @@ public void testBlankPredicateName() throws ParsingException { String input = "_:(a) ."; RuleParser.parse(input); } - - @Test - public void predicateIriRoundTripTest() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); - Fact f2 = Expressions.makeFact("1.e1", a); - assertEquals(f, f2); - } - - @Test - public void predicateIriRoundTripTest2() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("(a)."); - Fact f2 = Expressions.makeFact("a:b", a); - assertEquals(f, f2); - } - } From 9350e231851404a7e8dc7e6d566c6527634f78e0 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 15:28:05 +0100 Subject: [PATCH 0638/1255] added some changes --- .../vlog4j/syntax/parser/RuleParserTest.java | 18 ++++++++++++++++++ 1 file changed, 18 
insertions(+) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 2be0725a1..bdf9ac788 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -32,6 +32,7 @@ import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -39,6 +40,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -409,4 +411,20 @@ public void testBlankPredicateName() throws ParsingException { String input = "_:(a) ."; RuleParser.parse(input); } + + @Test + public void predicateNormalIriEqualityTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f2 = Expressions.makeFact("1.e1", a); + assertEquals(f, f2); + } + + @Test + public void predicateNormalIriEqualityTest2() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = RuleParser.parseFact("(a)."); + Fact f2 = Expressions.makeFact("a:b", a); + assertEquals(f, f2); + } } From 
6f7d52b3fb802d2c168b36416dde75764e4458b3 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 16:12:04 +0100 Subject: [PATCH 0639/1255] fixed merge problems --- .../reasoner/implementation/VLogReasoner.java | 1605 +++++++++-------- 1 file changed, 812 insertions(+), 793 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b630a6970..5376e0896 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,793 +1,812 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import java.io.IOException; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import 
org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; -import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.MaterializationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.CyclicCheckResult; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Reasoner implementation using the VLog backend. - * - * - * - * @author Markus Kroetzsch - * - */ -public class VLogReasoner implements Reasoner { - private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - - /** - * Dummy data source declaration for predicates for which we have explicit local - * facts in the input. - * - * @author Markus Kroetzsch - * - */ - class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { - - final Predicate predicate; - - public LocalFactsDataSourceDeclaration(Predicate predicate) { - this.predicate = predicate; - } - - @Override - public T accept(StatementVisitor statementVisitor) { - return statementVisitor.visit(this); - } - - @Override - public Predicate getPredicate() { - return this.predicate; - } - - @Override - public DataSource getDataSource() { - return null; - } - - @Override - public int hashCode() { - return predicate.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return predicate.equals(other.predicate); - } - - @Override - public String getSyntacticRepresentation() { - - throw new UnsupportedOperationException( - "This method is not implemented for type LocalFactsDataSourceDeclaration"); - } - - } - - /** - * Local visitor implementation for processing statements upon loading. Internal - * index structures are updated based on the statements that are detected. 
- * - * @author Markus Kroetzsch - * - */ - class LoadKbVisitor implements StatementVisitor { - - public void clearIndexes() { - edbPredicates.clear(); - idbPredicates.clear(); - aliasedEdbPredicates.clear(); - aliasesForEdbPredicates.clear(); - directEdbFacts.clear(); - rules.clear(); - } - - @Override - public Void visit(Fact statement) { - final Predicate predicate = statement.getPredicate(); - registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList(); - facts.add(statement); - directEdbFacts.put(predicate, facts); - } else { - directEdbFacts.get(predicate).add(statement); - } - return null; - } - - @Override - public Void visit(Rule statement) { - rules.add(statement); - for (final PositiveLiteral positiveLiteral : statement.getHead()) { - final Predicate predicate = positiveLiteral.getPredicate(); - if (!idbPredicates.contains(predicate)) { - if (edbPredicates.containsKey(predicate)) { - addEdbAlias(edbPredicates.get(predicate)); - edbPredicates.remove(predicate); - } - idbPredicates.add(predicate); - } - } - return null; - } - - @Override - public Void visit(DataSourceDeclaration statement) { - registerEdbDeclaration(statement); - return null; - } - - void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { - if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { - addEdbAlias(dataSourceDeclaration); - } - } else { - final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); - if (currentMainDeclaration == null) { - edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { - addEdbAlias(currentMainDeclaration); - addEdbAlias(dataSourceDeclaration); - edbPredicates.remove(predicate); - } // 
else: predicate already known to have local facts (only) - } - } - - void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - Predicate aliasPredicate; - if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { - aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); - } else { - aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), - predicate.getArity()); - } - aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - aliasedEdbPredicates.add(predicate); - - final List terms = new ArrayList<>(); - for (int i = 1; i <= predicate.getArity(); i++) { - terms.add(new UniversalVariableImpl("X" + i)); - } - final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); - final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), - new ConjunctionImpl(Arrays.asList(body))); - rules.add(rule); - } - - } - - final KnowledgeBase knowledgeBase; - final VLog vLog = new VLog(); - - final Map aliasesForEdbPredicates = new HashMap<>(); - final Set idbPredicates = new HashSet<>(); - final Map edbPredicates = new HashMap<>(); - final Set aliasedEdbPredicates = new HashSet<>(); - final Map> directEdbFacts = new HashMap<>(); - final Set rules = new HashSet<>(); - - private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; - private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; - - private LogLevel internalLogLevel = LogLevel.WARNING; - private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; - private Integer timeoutAfterSeconds; - private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; - - /** - * Holds the state of the reasoning result. Has value {@code true} if reasoning - * has completed, {@code false} if it has been interrupted. 
- */ - private boolean reasoningCompleted; - - public VLogReasoner(KnowledgeBase knowledgeBase) { - super(); - this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addListener(this); - - setLogLevel(this.internalLogLevel); - } - - @Override - public KnowledgeBase getKnowledgeBase() { - return this.knowledgeBase; - } - - @Override - public void setAlgorithm(final Algorithm algorithm) { - Validate.notNull(algorithm, "Algorithm cannot be null!"); - validateNotClosed(); - this.algorithm = algorithm; - } - - @Override - public Algorithm getAlgorithm() { - return this.algorithm; - } - - @Override - public void setReasoningTimeout(Integer seconds) { - validateNotClosed(); - if (seconds != null) { - Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); - } - this.timeoutAfterSeconds = seconds; - } - - @Override - public Integer getReasoningTimeout() { - return this.timeoutAfterSeconds; - } - - @Override - public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - validateNotClosed(); - Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); - this.ruleRewriteStrategy = ruleRewritingStrategy; - } - - @Override - public RuleRewriteStrategy getRuleRewriteStrategy() { - return this.ruleRewriteStrategy; - } - - /* - * TODO Due to automatic predicate renaming, it can happen that an EDB predicate - * cannot be queried after loading unless reasoning has already been invoked - * (since the auxiliary rule that imports the EDB facts to the "real" predicate - * must be used). 
This issue could be weakened by rewriting queries to - * (single-source) EDB predicates internally when in such a state, - */ - // @Override - void load() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - loadKnowledgeBase(); - break; - case KB_LOADED: - case MATERIALISED: - // do nothing, all KB is already loaded - break; - case KB_CHANGED: - resetReasoner(); - loadKnowledgeBase(); - default: - break; - } - } - - void loadKnowledgeBase() throws IOException { - LOGGER.info("Started loading knowledge base ..."); - final LoadKbVisitor visitor = new LoadKbVisitor(); - visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { - statement.accept(visitor); - } - - if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { - LOGGER.warn("No facts have been provided."); - } - - try { - this.vLog.start(getDataSourcesConfigurationString(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } - loadInMemoryDataSources(); - - validateDataSourcePredicateArities(); - - loadFacts(); - loadRules(); - - this.reasonerState = ReasonerState.KB_LOADED; - - // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty() ? 
Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; - - LOGGER.info("Finished loading knowledge base."); - } - - String getDataSourcesConfigurationString() { - final StringBuilder configStringBuilder = new StringBuilder(); - final Formatter formatter = new Formatter(configStringBuilder); - int dataSourceIndex = 0; - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, - dataSourceIndex, formatter); - } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, - dataSourceIndex, formatter); - } - formatter.close(); - return configStringBuilder.toString(); - } - - int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, - Formatter formatter) { - if (dataSource != null) { - final String configString = dataSource.toConfigString(); - if (configString != null) { - formatter.format(dataSource.toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(predicate)); - return dataSourceIndex + 1; - } - } - return dataSourceIndex; - } - - /** - * Checks if the loaded external data sources do in fact contain data of the - * correct arity. 
- * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : edbPredicates.keySet()) { - validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); - } - for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { - validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), - dataSourceDeclaration.getDataSource()); - } - } - - void loadInMemoryDataSources() { - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); - } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); - } - } - - void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { - final InMemoryDataSource inMemoryDataSource; - if (dataSource instanceof InMemoryDataSource) { - inMemoryDataSource = (InMemoryDataSource) dataSource; - } else { - return; - } - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); - this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - } - - /** - * Checks if the loaded external data for a given source does in fact contain - * data of the correct arity for the 
given predidate. - * - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used - * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) - throws IncompatiblePredicateArityException { - if (dataSource == null) - return; - try { - final int dataSourcePredicateArity = this.vLog - .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); - if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty! ", dataSource, predicate); - } else if (predicate.getArity() != dataSourcePredicateArity) { - throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - void loadFacts() { - for (final Predicate predicate : directEdbFacts.keySet()) { - Predicate aliasPredicate; - if (edbPredicates.containsKey(predicate)) { - aliasPredicate = predicate; - } else { - aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); - } - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - final String[][] vLogPredicateTuples = ModelToVLogConverter - .toVLogFactTuples(directEdbFacts.get(predicate)); - this.vLog.addData(vLogPredicateName, vLogPredicateTuples); - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - } - } - - void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); - final karmaresearch.vlog.VLog.RuleRewriteStrategy 
vLogRuleRewriteStrategy = ModelToVLogConverter - .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); - try { - this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); - if (LOGGER.isDebugEnabled()) { - for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { - LOGGER.debug("Loaded rule {}.", rule.toString()); - } - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - @Override - public boolean reason() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - load(); - runChase(); - break; - case KB_LOADED: - runChase(); - break; - case KB_CHANGED: - resetReasoner(); - load(); - runChase(); - break; - case MATERIALISED: - runChase(); - break; - default: - break; - } - - return this.reasoningCompleted; - } - - private void runChase() { - LOGGER.info("Started materialisation of inferences ..."); - this.reasonerState = ReasonerState.MATERIALISED; - - final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; - try { - if (this.timeoutAfterSeconds == null) { - this.vLog.materialize(skolemChase); - this.reasoningCompleted = true; - } else { - this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final MaterializationException e) { - // FIXME: the message generated here is not guaranteed to be the correct - // interpretation of the exception that is caught - throw new RuntimeException( - "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", - e); - } - - if (this.reasoningCompleted) { - this.correctness = Correctness.SOUND_AND_COMPLETE; - LOGGER.info("Completed materialisation of inferences."); - } else { - this.correctness = Correctness.SOUND_BUT_INCOMPLETE; - LOGGER.info("Stopped materialisation 
of inferences (possibly incomplete)."); - } - } - - @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); - - final boolean filterBlanks = !includeBlanks; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - - TermQueryResultIterator stringQueryResultIterator; - try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); - } - - logWarningOnCorrectness(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); - } - - @Override - public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); - Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); - Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); - - final boolean filterBlanks = !includeBlanks; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - try { - this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); - } catch 
(final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } catch (final NonExistingPredicateException e1) { - throw new IllegalArgumentException(MessageFormat.format( - "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); - } - - logWarningOnCorrectness(); - return this.correctness; - } - - private void logWarningOnCorrectness() { - if (this.correctness != Correctness.SOUND_AND_COMPLETE) { - LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); - } - } - - @Override - public void resetReasoner() { - validateNotClosed(); - this.reasonerState = ReasonerState.KB_NOT_LOADED; - this.vLog.stop(); - LOGGER.info("Reasoner has been reset. All inferences computed during reasoning have been discarded."); - } - - @Override - public void close() { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.info("Reasoner is already closed."); - } else { - this.reasonerState = ReasonerState.CLOSED; - this.knowledgeBase.deleteListener(this); - this.vLog.stop(); - LOGGER.info("Reasoner closed."); - } - } - - @Override - public void setLogLevel(LogLevel logLevel) { - validateNotClosed(); - Validate.notNull(logLevel, "Log level cannot be null!"); - this.internalLogLevel = logLevel; - this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); - } - - @Override - public LogLevel getLogLevel() { - return this.internalLogLevel; - } - - @Override - public void setLogFile(String filePath) { - validateNotClosed(); - this.vLog.setLogFile(filePath); - } - - @Override - public boolean isJA() { - return checkAcyclicity(AcyclicityNotion.JA); - } - - @Override - public boolean isRJA() { - return checkAcyclicity(AcyclicityNotion.RJA); - } - - @Override - public boolean isMFA() { - return checkAcyclicity(AcyclicityNotion.MFA); - } - - @Override - public boolean isRMFA() { - return checkAcyclicity(AcyclicityNotion.RMFA); - } - - @Override 
- public boolean isMFC() { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Checking rules acyclicity is not allowed before loading!"); - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic("MFC"); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.CYCLIC); - } - - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - try { - load(); - } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 - throw new RuntimeException(e); - } - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); - } - - @Override - public CyclicityResult checkForCycles() { - final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); - if (acyclic) { - return CyclicityResult.ACYCLIC; - } else { - final boolean cyclic = isMFC(); - if (cyclic) { - return CyclicityResult.CYCLIC; - } - return CyclicityResult.UNDETERMINED; - } - } - - @Override - public void onStatementsAdded(List statementsAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementsAdded(statementsAdded); - updateCorrectness(); - } - - @Override - public void onStatementAdded(Statement statementAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementAdded(statementAdded); - updateCorrectness(); - } - - private void 
updateReasonerToKnowledgeBaseChanged() { - if (this.reasonerState.equals(ReasonerState.KB_LOADED) - || this.reasonerState.equals(ReasonerState.MATERIALISED)) { - - this.reasonerState = ReasonerState.KB_CHANGED; - } - } - - private void updateCorrectness() { - if (this.reasonerState == ReasonerState.KB_CHANGED) { - - final boolean noRules = this.knowledgeBase.getRules().isEmpty(); - this.correctness = noRules ? Correctness.SOUND_BUT_INCOMPLETE : Correctness.INCORRECT; - } - } - - /** - * Check if reasoner is closed and throw an exception if it is. - * - * @throws ReasonerStateException - */ - void validateNotClosed() throws ReasonerStateException { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.error("Invalid operation requested on a closed reasoner object!"); - throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); - } - } - -} +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import 
org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; +import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; +import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import karmaresearch.vlog.AlreadyStartedException; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.CyclicCheckResult; + +/* + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Reasoner implementation using the VLog backend. + * + * + * + * @author Markus Kroetzsch + * + */ +public class VLogReasoner implements Reasoner { + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); + + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return predicate.equals(other.predicate); + } + + } + + /** + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. 
+ * + * @author Markus Kroetzsch + * + */ + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + edbPredicates.clear(); + idbPredicates.clear(); + aliasedEdbPredicates.clear(); + aliasesForEdbPredicates.clear(); + directEdbFacts.clear(); + rules.clear(); + } + + @Override + public Void visit(Fact statement) { + final Predicate predicate = statement.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList(); + facts.add(statement); + directEdbFacts.put(predicate, facts); + } else { + directEdbFacts.get(predicate).add(statement); + } + return null; + } + + @Override + public Void visit(Rule statement) { + rules.add(statement); + for (final PositiveLiteral positiveLiteral : statement.getHead()) { + final Predicate predicate = positiveLiteral.getPredicate(); + if (!idbPredicates.contains(predicate)) { + if (edbPredicates.containsKey(predicate)) { + addEdbAlias(edbPredicates.get(predicate)); + edbPredicates.remove(predicate); + } + idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + } + + void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { + if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); + if (currentMainDeclaration == null) { + edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + edbPredicates.remove(predicate); + } // 
else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), + predicate.getArity()); + } + aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new UniversalVariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), + new ConjunctionImpl(Arrays.asList(body))); + rules.add(rule); + } + + } + + final KnowledgeBase knowledgeBase; + final VLog vLog = new VLog(); + + final Map aliasesForEdbPredicates = new HashMap<>(); + final Set idbPredicates = new HashSet<>(); + final Map edbPredicates = new HashMap<>(); + final Set aliasedEdbPredicates = new HashSet<>(); + final Map> directEdbFacts = new HashMap<>(); + final Set rules = new HashSet<>(); + + private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; + private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; + + private LogLevel internalLogLevel = LogLevel.WARNING; + private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; + private Integer timeoutAfterSeconds; + private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; + + /** + * Holds the state of the reasoning result. Has value {@code true} if reasoning + * has completed, {@code false} if it has been interrupted. 
+ */ + private boolean reasoningCompleted; + + public VLogReasoner(KnowledgeBase knowledgeBase) { + super(); + this.knowledgeBase = knowledgeBase; + this.knowledgeBase.addListener(this); + + setLogLevel(this.internalLogLevel); + } + + @Override + public KnowledgeBase getKnowledgeBase() { + return this.knowledgeBase; + } + + @Override + public void setAlgorithm(final Algorithm algorithm) { + Validate.notNull(algorithm, "Algorithm cannot be null!"); + validateNotClosed(); + this.algorithm = algorithm; + } + + @Override + public Algorithm getAlgorithm() { + return this.algorithm; + } + + @Override + public void setReasoningTimeout(Integer seconds) { + validateNotClosed(); + if (seconds != null) { + Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); + } + this.timeoutAfterSeconds = seconds; + } + + @Override + public Integer getReasoningTimeout() { + return this.timeoutAfterSeconds; + } + + @Override + public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { + validateNotClosed(); + Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); + this.ruleRewriteStrategy = ruleRewritingStrategy; + } + + @Override + public RuleRewriteStrategy getRuleRewriteStrategy() { + return this.ruleRewriteStrategy; + } + + /* + * TODO Due to automatic predicate renaming, it can happen that an EDB predicate + * cannot be queried after loading unless reasoning has already been invoked + * (since the auxiliary rule that imports the EDB facts to the "real" predicate + * must be used). 
This issue could be weakened by rewriting queries to + * (single-source) EDB predicates internally when in such a state, + */ + // @Override + void load() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; + } + } + + void loadKnowledgeBase() throws IOException { + LOGGER.info("Started loading knowledge base ..."); + final LoadKbVisitor visitor = new LoadKbVisitor(); + visitor.clearIndexes(); + for (final Statement statement : knowledgeBase) { + statement.accept(visitor); + } + + if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { + LOGGER.warn("No facts have been provided."); + } + + try { + this.vLog.start(getDataSourcesConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + loadInMemoryDataSources(); + + validateDataSourcePredicateArities(); + + loadFacts(); + loadRules(); + + this.reasonerState = ReasonerState.KB_LOADED; + + // if there are no rules, then materialisation state is complete + this.correctness = rules.isEmpty() ? 
Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + + LOGGER.info("Finished loading knowledge base."); + } + + String getDataSourcesConfigurationString() { + final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); + int dataSourceIndex = 0; + for (final Predicate predicate : this.edbPredicates.keySet()) { + final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, + dataSourceIndex, formatter); + } + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); + dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, + dataSourceIndex, formatter); + } + formatter.close(); + return configStringBuilder.toString(); + } + + int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, + Formatter formatter) { + if (dataSource != null) { + final String configString = dataSource.toConfigString(); + if (configString != null) { + formatter.format(dataSource.toConfigString(), dataSourceIndex, + ModelToVLogConverter.toVLogPredicate(predicate)); + return dataSourceIndex + 1; + } + } + return dataSourceIndex; + } + + /** + * Checks if the loaded external data sources do in fact contain data of the + * correct arity. 
+ * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { + for (final Predicate predicate : edbPredicates.keySet()) { + validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); + } + for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { + validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), + dataSourceDeclaration.getDataSource()); + } + } + + void loadInMemoryDataSources() { + for (final Predicate predicate : this.edbPredicates.keySet()) { + final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); + } + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); + loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); + } + } + + void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { + final InMemoryDataSource inMemoryDataSource; + if (dataSource instanceof InMemoryDataSource) { + inMemoryDataSource = (InMemoryDataSource) dataSource; + } else { + return; + } + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + } + + /** + * Checks if the loaded external data for a given source does in fact contain + * data of the correct arity for the 
given predidate. + * + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) + throws IncompatiblePredicateArityException { + if (dataSource == null) + return; + try { + final int dataSourcePredicateArity = this.vLog + .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); + if (dataSourcePredicateArity == -1) { + LOGGER.warn("Data source {} for predicate {} is empty! ", dataSource, predicate); + } else if (predicate.getArity() != dataSourcePredicateArity) { + throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + void loadFacts() { + for (final Predicate predicate : directEdbFacts.keySet()) { + Predicate aliasPredicate; + if (edbPredicates.containsKey(predicate)) { + aliasPredicate = predicate; + } else { + aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); + final String[][] vLogPredicateTuples = ModelToVLogConverter + .toVLogFactTuples(directEdbFacts.get(predicate)); + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : vLogPredicateTuples) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + } + } + + void loadRules() { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); + final karmaresearch.vlog.VLog.RuleRewriteStrategy 
vLogRuleRewriteStrategy = ModelToVLogConverter + .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); + try { + this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); + if (LOGGER.isDebugEnabled()) { + for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { + LOGGER.debug("Loaded rule {}.", rule.toString()); + } + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + @Override + public boolean reason() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + load(); + runChase(); + break; + case KB_LOADED: + runChase(); + break; + case KB_CHANGED: + resetReasoner(); + load(); + runChase(); + break; + case MATERIALISED: + runChase(); + break; + default: + break; + } + + return this.reasoningCompleted; + } + + private void runChase() { + LOGGER.info("Started materialisation of inferences ..."); + this.reasonerState = ReasonerState.MATERIALISED; + + final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; + try { + if (this.timeoutAfterSeconds == null) { + this.vLog.materialize(skolemChase); + this.reasoningCompleted = true; + } else { + this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final MaterializationException e) { + // FIXME: the message generated here is not guaranteed to be the correct + // interpretation of the exception that is caught + throw new RuntimeException( + "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", + e); + } + + if (this.reasoningCompleted) { + this.correctness = Correctness.SOUND_AND_COMPLETE; + LOGGER.info("Completed materialisation of inferences."); + } else { + this.correctness = Correctness.SOUND_BUT_INCOMPLETE; + LOGGER.info("Stopped materialisation 
of inferences (possibly incomplete)."); + } + } + + @Override + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeBlanks) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + + final boolean filterBlanks = !includeBlanks; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + TermQueryResultIterator stringQueryResultIterator; + try { + stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); + return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); + } + + logWarningOnCorrectness(); + return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + } + + @Override + public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + final boolean includeBlanks) throws IOException { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); + Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); + + final boolean filterBlanks = !includeBlanks; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + try { + this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); + } catch 
(final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } catch (final NonExistingPredicateException e1) { + throw new IllegalArgumentException(MessageFormat.format( + "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); + } + + logWarningOnCorrectness(); + return this.correctness; + } + + private void logWarningOnCorrectness() { + if (this.correctness != Correctness.SOUND_AND_COMPLETE) { + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); + } + } + + @Override + public void resetReasoner() { + validateNotClosed(); + this.reasonerState = ReasonerState.KB_NOT_LOADED; + this.vLog.stop(); + LOGGER.info("Reasoner has been reset. All inferences computed during reasoning have been discarded."); + } + + @Override + public void close() { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.info("Reasoner is already closed."); + } else { + this.reasonerState = ReasonerState.CLOSED; + this.knowledgeBase.deleteListener(this); + this.vLog.stop(); + LOGGER.info("Reasoner closed."); + } + } + + @Override + public void setLogLevel(LogLevel logLevel) { + validateNotClosed(); + Validate.notNull(logLevel, "Log level cannot be null!"); + this.internalLogLevel = logLevel; + this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); + } + + @Override + public LogLevel getLogLevel() { + return this.internalLogLevel; + } + + @Override + public void setLogFile(String filePath) { + validateNotClosed(); + this.vLog.setLogFile(filePath); + } + + @Override + public boolean isJA() { + return checkAcyclicity(AcyclicityNotion.JA); + } + + @Override + public boolean isRJA() { + return checkAcyclicity(AcyclicityNotion.RJA); + } + + @Override + public boolean isMFA() { + return checkAcyclicity(AcyclicityNotion.MFA); + } + + @Override + public boolean isRMFA() { + return checkAcyclicity(AcyclicityNotion.RMFA); + } + + @Override 
+ public boolean isMFC() { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Checking rules acyclicity is not allowed before loading!"); + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic("MFC"); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.CYCLIC); + } + + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + load(); + } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); + } + + @Override + public CyclicityResult checkForCycles() { + final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); + if (acyclic) { + return CyclicityResult.ACYCLIC; + } else { + final boolean cyclic = isMFC(); + if (cyclic) { + return CyclicityResult.CYCLIC; + } + return CyclicityResult.UNDETERMINED; + } + } + + @Override + public void onStatementsAdded(List statementsAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementsAdded(statementsAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementAdded(Statement statementAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementAdded(statementAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + 
public void onStatementRemoved(Statement statementRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + @Override + public void onStatementsRemoved(List statementsRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + private void updateReasonerToKnowledgeBaseChanged() { + if (this.reasonerState.equals(ReasonerState.KB_LOADED) + || this.reasonerState.equals(ReasonerState.MATERIALISED)) { + + this.reasonerState = ReasonerState.KB_CHANGED; + } + } + + private void updateCorrectnessOnStatementsAdded() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + private void updateCorrectnessOnStatementsRemoved() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + /** + * Check if reasoner is closed and throw an exception if it is. + * + * @throws ReasonerStateException + */ + void validateNotClosed() throws ReasonerStateException { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.error("Invalid operation requested on a closed reasoner object!"); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); + } + } + + ReasonerState getReasonerState() { + return this.reasonerState; + } + + void setReasonerState(ReasonerState reasonerState) { + this.reasonerState = reasonerState; + } + +} \ No newline at end of file From de7487584a0aaaa1a449e70dbfccdd144d6e5f32 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 16:16:52 +0100 Subject: [PATCH 0640/1255] changed names --- .../org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index bdf9ac788..8ba1fb964 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -413,7 +413,7 @@ public void testBlankPredicateName() throws ParsingException { } @Test - public void predicateNormalIriEqualityTest() throws ParsingException { + public void predicateRelativeNumericIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); Fact f = RuleParser.parseFact("<1.e1>(a)."); Fact f2 = Expressions.makeFact("1.e1", a); @@ -421,7 +421,7 @@ public void predicateNormalIriEqualityTest() throws ParsingException { } @Test - public void predicateNormalIriEqualityTest2() throws ParsingException { + public void predicateAbsoluteIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); Fact f = RuleParser.parseFact("(a)."); Fact f2 = Expressions.makeFact("a:b", a); From da60ed0ec3a52d3520d4e685be999b9bce1c09a1 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 16:31:19 +0100 Subject: [PATCH 0641/1255] added some styles --- .../vlog4j/core/model/implementation/Serializer.java | 11 +++++++---- .../core/reasoner/implementation/VLogReasoner.java | 2 +- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ad3d345b2..122b9fc07 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -88,6 +88,10 @@ private static String checkRelativeAbsoluteIri(String string) { } } + private static String addQuote(String string) { + return QUOTE + string + 
QUOTE; + } + /** * Creates a String representation of a given {@link Rule}. * @@ -240,8 +244,7 @@ public static String getString(Conjunction conjunction) { * {@link LanguageStringConstant}. */ public static String getConstantName(LanguageStringConstant languageStringConstant) { - return QUOTE + escape(languageStringConstant.getString()) + QUOTE + AT - + languageStringConstant.getLanguageTag(); + return addQuote(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); } /** @@ -255,7 +258,7 @@ public static String getConstantName(LanguageStringConstant languageStringConsta */ public static String getString(DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return QUOTE + datatypeConstant.getLexicalValue() + QUOTE; + return addQuote(datatypeConstant.getLexicalValue()); } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -278,7 +281,7 @@ public static String getString(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. 
*/ public static String getConstantName(DatatypeConstant datatypeConstant) { - return QUOTE + escape(datatypeConstant.getLexicalValue()) + QUOTE + CARET + CARET + LESS_THAN + return addQuote(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + LESS_THAN + datatypeConstant.getDatatype() + MORE_THAN; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 5376e0896..91ed463f0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -809,4 +809,4 @@ void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } -} \ No newline at end of file +} From 2dd34cc6df70903acd1091d9583cb84d90649812 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 2 Dec 2019 21:38:11 +0100 Subject: [PATCH 0642/1255] fixed --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 91ed463f0..260db93e2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -808,5 +808,4 @@ ReasonerState getReasonerState() { void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } - } From 967430814ab7823bdb7899c7225b7e49d59e4757 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 17:05:19 +0100 Subject: [PATCH 0643/1255] Bump maven-surefire-plugin version --- pom.xml | 9 +++++++-- 1 file changed, 7 insertions(+), 
2 deletions(-) diff --git a/pom.xml b/pom.xml index 551a99f16..52d593caa 100644 --- a/pom.xml +++ b/pom.xml @@ -137,7 +137,7 @@
- org.eclipse.m2e lifecycle-mapping @@ -208,7 +208,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -218,6 +218,11 @@ 1.8 + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M4 + org.eluder.coveralls From cd21005e6db00bcb3cd7b526edd4b63df83ca792 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 17:38:23 +0100 Subject: [PATCH 0644/1255] Prefer jacoco over cobertura Fixes #85. --- coverage/LICENSE.txt | 201 +++++++++++++++++++++++++++++++++++++++++++ coverage/pom.xml | 76 ++++++++++++++++ pom.xml | 65 +++++++------- 3 files changed, 311 insertions(+), 31 deletions(-) create mode 100644 coverage/LICENSE.txt create mode 100644 coverage/pom.xml diff --git a/coverage/LICENSE.txt b/coverage/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/coverage/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/coverage/pom.xml b/coverage/pom.xml new file mode 100644 index 000000000..17c0c49b1 --- /dev/null +++ b/coverage/pom.xml @@ -0,0 +1,76 @@ + + + 4.0.0 + + + org.semanticweb.vlog4j + vlog4j-parent + 0.5.0-SNAPSHOT + + + coverage + + coverage + + + org.semanticweb.vlog4j + vlog4j-core + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-rdf + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-examples + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-owlapi + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-graal + 0.5.0-SNAPSHOT + + + org.semanticweb.vlog4j + vlog4j-parser + 0.5.0-SNAPSHOT + + + + + + + org.jacoco + jacoco-maven-plugin + + + aggregate-reports-ut + test + + report-aggregate + + + + + **/javacc/JavaCC*.class + **/javacc/JavaCharStream.class + **/javacc/ParseException.class + **/javacc/SimpleCharStream.class + **/javacc/Token.class + **/javacc/TokenMgrError.class + + + + + + + + diff --git a/pom.xml b/pom.xml index 52d593caa..0d33b88b2 100644 --- a/pom.xml +++ b/pom.xml @@ -15,6 +15,9 @@ https://github.com/knowsys/vlog4j + vlog4j-core vlog4j-rdf vlog4j-examples @@ -22,6 +25,7 @@ vlog4j-graal vlog4j-parser vlog4j-client + coverage @@ -200,6 +204,11 @@ + + org.jacoco + jacoco-maven-plugin + 0.8.5 + @@ -218,11 +227,16 @@ 1.8 - - org.apache.maven.plugins - maven-surefire-plugin - 3.0.0-M4 - + + org.apache.maven.plugins + 
maven-surefire-plugin + 3.0.0-M4 + + ${surefireArgLine} + 1C + true + + org.eluder.coveralls @@ -230,34 +244,23 @@ 4.3.0 - - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 - - xml - - 256m - - true - - - - **/javacc/JavaCC*.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class - - - + + org.jacoco + jacoco-maven-plugin + + + prepare-agent + + prepare-agent + + + surefireArgLine + + + - org.apache.maven.plugins maven-javadoc-plugin From 23218990877378b3becf30275d8312295d5c8d4d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 17:51:36 +0100 Subject: [PATCH 0645/1255] Update travis hooks to use jacoco --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 24e616797..98555fd73 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,7 @@ jdk: install: mvn install $OPTIONS -DskipTests=true after_success: - - mvn clean cobertura:cobertura coveralls:report + - mvn clean test jacoco:report coveralls:report dist: trusty sudo: false From bb16b979cf6be86753a59822b113d302d401a48b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 18:37:10 +0100 Subject: [PATCH 0646/1255] Update exclusion list for jacoco --- coverage/pom.xml | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 17c0c49b1..fcd928ba5 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -60,12 +60,14 @@ - **/javacc/JavaCC*.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class + **/javacc/JavaCCParser.java + **/javacc/JavaCCParserConstants.java + **/javacc/JavaCCParserTokenManager.java + **/javacc/JavaCharStream.java + **/javacc/ParseException.java + **/javacc/SimpleCharStream.java + **/javacc/Token.java + 
**/javacc/TokenMgrError.java From 046079e3c32828e0118b68c12d4f7dbe886965fd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 20:09:49 +0100 Subject: [PATCH 0647/1255] Fix exclusion of generated source files --- coverage/pom.xml | 18 +++++----------- pom.xml | 54 ++++++++++++++++++++++++++++++++++++++++++------ 2 files changed, 53 insertions(+), 19 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index fcd928ba5..c3f0ccadb 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -47,6 +47,11 @@ + + + org.eluder.coveralls + coveralls-maven-plugin + org.jacoco jacoco-maven-plugin @@ -57,19 +62,6 @@ report-aggregate - - - - **/javacc/JavaCCParser.java - **/javacc/JavaCCParserConstants.java - **/javacc/JavaCCParserTokenManager.java - **/javacc/JavaCharStream.java - **/javacc/ParseException.java - **/javacc/SimpleCharStream.java - **/javacc/Token.java - **/javacc/TokenMgrError.java - - diff --git a/pom.xml b/pom.xml index 0d33b88b2..bf1cdca27 100644 --- a/pom.xml +++ b/pom.xml @@ -238,10 +238,25 @@ - - org.eluder.coveralls - coveralls-maven-plugin - 4.3.0 + + org.eluder.coveralls + coveralls-maven-plugin + 4.3.0 + + + coverage/target/site/jacoco-aggregate/jacoco.xml + + + + + + javax.xml.bind + jaxb-api + 2.3.1 + + @@ -257,11 +272,38 @@ surefireArgLine + + default-cli + + report + + test + + + ${project.reporting.outputDirectory}/jacoco-ut + + + + + + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError* + + - + org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} From 0140a95c2ad70e7bf5276ade6a4bdcdc34ad668a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 20:15:42 +0100 Subject: [PATCH 0648/1255] Coverage: add vlog4j-client --- coverage/pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/coverage/pom.xml 
b/coverage/pom.xml index c3f0ccadb..4cbd79d9b 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -43,6 +43,11 @@ vlog4j-parser 0.5.0-SNAPSHOT + + org.semanticweb.vlog4j + vlog4j-client + 0.5.0-SNAPSHOT + From 1823d24f2990d3e8abd40b8e4e8bb39be0388fa4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 21:59:50 +0100 Subject: [PATCH 0649/1255] Coverage: exclude vlog4j-examples --- coverage/pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 4cbd79d9b..e158f1d91 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -23,11 +23,6 @@ vlog4j-rdf 0.5.0-SNAPSHOT - - org.semanticweb.vlog4j - vlog4j-examples - 0.5.0-SNAPSHOT - org.semanticweb.vlog4j vlog4j-owlapi From 72e81023f2bc4b545caa5d4c159052e85c02a6c2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 14:33:07 +0100 Subject: [PATCH 0650/1255] Rename client package to org.semanticweb.vlog4j.client --- vlog4j-client/pom.xml | 2 +- .../vlog4j/client/picocli/ClientUtils.java | 10 +++++----- .../vlog4j/client/picocli/PrintQueryResults.java | 14 +++++++------- .../vlog4j/client/picocli/SaveModel.java | 12 ++++++------ .../vlog4j/client/picocli/SaveQueryResults.java | 12 ++++++------ .../vlog4j/client/picocli/VLog4jClient.java | 8 ++++---- .../client/picocli/VLog4jClientMaterialize.java | 8 ++++---- .../client/picocli/PrintQueryResultsTest.java | 8 ++++---- .../vlog4j/client/picocli/SaveModelTest.java | 8 ++++---- .../client/picocli/SaveQueryResultsTest.java | 6 +++--- 10 files changed, 44 insertions(+), 44 deletions(-) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/ClientUtils.java (98%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/PrintQueryResults.java (97%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveModel.java (98%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveQueryResults.java (98%) 
rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/VLog4jClient.java (95%) rename vlog4j-client/src/main/java/org/{ => semanticweb}/vlog4j/client/picocli/VLog4jClientMaterialize.java (99%) rename vlog4j-client/src/test/java/org/{ => semanticweb}/vlog4j/client/picocli/PrintQueryResultsTest.java (98%) rename vlog4j-client/src/test/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveModelTest.java (99%) rename vlog4j-client/src/test/java/org/{ => semanticweb}/vlog4j/client/picocli/SaveQueryResultsTest.java (98%) diff --git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index fbee05c25..f6f9d5169 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -67,7 +67,7 @@ - org.vlog4j.client.picocli.VLog4jClient + org.semanticweb.vlog4j.client.picocli.VLog4jClient diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java similarity index 98% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java index 5893f86c2..35b10b2b0 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/ClientUtils.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -32,7 +32,7 @@ /** * Utility class for interacting with the vlog4j client. - * + * * @author dragoste * */ @@ -53,7 +53,7 @@ private ClientUtils() { * restrict the logging messages that are shown on the console or to change * their formatting. See the documentation of Log4J for details on how to do * this. - * + * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java similarity index 97% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java index f80c226f2..b9d656f3d 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,7 @@ /** * Helper class to print query results. - * + * * @author Larry Gonzalez * */ @@ -35,7 +35,7 @@ public class PrintQueryResults { /** * If true, Vlog4jClient will print the size of the query result. 
Mutually * exclusive with {@code --print-complete-query-result} - * + * * @default true */ @Option(names = "--print-query-result-size", description = "Boolean. If true, Vlog4jClient will print the size of the query result. True by default.") @@ -44,7 +44,7 @@ public class PrintQueryResults { /** * If true, Vlog4jClient will print the query result in stdout. Mutually * exclusive with {@code --print-query-result-size} - * + * * @default false */ @Option(names = "--print-complete-query-result", description = "Boolean. If true, Vlog4jClient will print the query result in stdout. False by default.") @@ -61,7 +61,7 @@ public PrintQueryResults(final boolean sizeOnly, final boolean complete) { /** * Check correct configuration of the class. @code{--print-query-result-size} * and @code{--print-query-result} are mutually exclusive. - * + * * @return @code{true} if configuration is valid. */ public boolean isValid() { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java similarity index 98% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java index fb03cf117..12be0e9d0 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,7 @@ /** * Helper class to save the resulting model of the materialization process. - * + * * @author Larry Gonzalez * */ @@ -65,7 +65,7 @@ public SaveModel(final boolean saveModel, final String outputDir) { /** * Check correct configuration of the class. If @code{--save-model} is true, * then a non-empty @code{--output-model-directory} is required. - * + * * @return @code{true} if configuration is valid. */ public boolean isConfigurationValid() { @@ -74,7 +74,7 @@ public boolean isConfigurationValid() { /** * Check that the path to store the model is either non-existing or a directory. - * + * * @return @code{true} if conditions are satisfied. */ public boolean isDirectoryValid() { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java similarity index 98% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java index 441aa359f..ce0345bd0 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,7 @@ /** * Helper class to save query results. - * + * * @author Larry Gonzalez * */ @@ -66,7 +66,7 @@ public SaveQueryResults(final boolean saveResults, final String outputDir) { /** * Check correct configuration of the class. If @code{--save-query-results} is * true, then a non-empty @code{--output-query-result-directory} is required. - * + * * @return @code{true} if configuration is valid. */ public boolean isConfigurationValid() { @@ -77,7 +77,7 @@ public boolean isConfigurationValid() { /** * Check that the path to store the query results is either non-existing or a * directory. - * + * * @return @code{true} if conditions are satisfied. */ public boolean isDirectoryValid() { diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java similarity index 95% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java index f35f01fb6..a0535ec79 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClient.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,7 @@ /** * Stand alone client for VLog4j. - * + * * @author Larry Gonzalez * */ diff --git a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java similarity index 99% rename from vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java rename to vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index 2cb915dac..af76e931a 100644 --- a/vlog4j-client/src/main/java/org/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -40,7 +40,7 @@ /** * Class to implement a command to execute full materialization. 
- * + * * @author Larry Gonzalez * */ diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java similarity index 98% rename from vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java rename to vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java index b949a5ff3..167b66fb4 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -36,7 +36,7 @@ public class PrintQueryResultsTest { sizeTrueCompleteTrue.setComplete(true); sizeFalseCompleteFalse.setSizeOnly(false); } - + @Test public void isValid_sizeTrueCompleteFalse_valid() { // default configuration diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java similarity index 99% rename from vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java rename to vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java index 5be484df7..17074eb37 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveModelTest.java +++ b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -20,9 +20,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -42,7 +42,7 @@ public class SaveModelTest { private final static SaveModel saveFalseDefaultDir = new SaveModel(); private final static SaveModel saveFalseEmptyDir = new SaveModel(false, ""); private final static SaveModel saveFalseNullDir = new SaveModel(false, null); - + static { saveTrueDefaultDir.setSaveModel(true); saveFalseDefaultDir.setSaveModel(false); diff --git a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java similarity index 98% rename from vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java rename to vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java index b7f684f61..124511f5d 100644 --- a/vlog4j-client/src/test/java/org/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java @@ -1,4 +1,4 @@ -package org.vlog4j.client.picocli; +package org.semanticweb.vlog4j.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -19,9 +19,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
From cf6811318dd776f0f560072e62ba721b620fc847 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 4 Oct 2019 17:25:35 +0200 Subject: [PATCH 0651/1255] Add editorconfig --- .editorconfig | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..1b5ced5b4 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,13 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_style = tab +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +trim_trailing_whitespace = false +insert_final_newline = false From 386e803897db9a204fef982c10152cde9a0e5329 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 4 Oct 2019 00:26:19 +0200 Subject: [PATCH 0652/1255] Ignore TAGS file --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index b760aec6a..15e338099 100644 --- a/.gitignore +++ b/.gitignore @@ -54,3 +54,4 @@ vlog4j-examples/src/main/data/output/* vlog4j-examples/src/main/data/logs/* vlog4j-rdf/src/main/data/output/* /build-vlog/vlog/ +/TAGS From f8fa375ba0bf06cff3dc2a6f7fd46a1a29524aa6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 14:36:41 +0100 Subject: [PATCH 0653/1255] Remove final on picocli @Options --- .../vlog4j/client/picocli/VLog4jClientMaterialize.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index af76e931a..fd566f667 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -59,19 +59,19 @@ public class VLog4jClientMaterialize implements Runnable { // private List graalRuleFiles = new 
ArrayList<>(); @Option(names = "--log-level", description = "Log level of VLog (c++ library). One of: DEBUG, INFO, WARNING (default), ERROR.", required = false) - private final LogLevel logLevel = LogLevel.WARNING; + private LogLevel logLevel = LogLevel.WARNING; @Option(names = "--log-file", description = "Log file of VLog (c++ library). VLog will log to the default system output by default", required = false) private String logFile; @Option(names = "--chase-algorithm", description = "Chase algorithm. RESTRICTED_CHASE (default) or SKOLEM_CHASE.", required = false) - private final Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; + private Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; @Option(names = "--timeout", description = "Timeout in seconds. Infinite by default", required = false) - private final int timeout = 0; + private int timeout = 0; @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. Vlog4jClient will print the size of its extension", required = true) - private final List queryStrings = new ArrayList<>(); + private List queryStrings = new ArrayList<>(); @ArgGroup(exclusive = false) private final PrintQueryResults printQueryResults = new PrintQueryResults(); From b917bb6877d4d7e62d0647c423f699fc1674ce78 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 14:58:21 +0100 Subject: [PATCH 0654/1255] Update travis configuration --- .editorconfig | 3 +++ .travis.yml | 32 +++++++++++++++++++++++--------- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/.editorconfig b/.editorconfig index 1b5ced5b4..b8d1c9f09 100644 --- a/.editorconfig +++ b/.editorconfig @@ -11,3 +11,6 @@ trim_trailing_whitespace = true [*.md] trim_trailing_whitespace = false insert_final_newline = false + +[*.yml] +indent_style = space diff --git a/.travis.yml b/.travis.yml index 98555fd73..49a0fac52 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,21 +1,35 @@ language: java -jdk: - - 
openjdk8 -# - oraclejdk8 -# - oraclejdk9 +matrix: + include: + - os: linux + dist: bionic + jdk: + - openjdk11 + + - os: linux + dist: trusty + addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-7 + jdk: + - openjdk8 + + - os: osx + ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -#before_install: - # - sudo apt-get install gcc-5 -y - # - eval “CC=gcc-5 && CXX=g++-5” - # - sh ./build-vlog-library.sh +before_install: + - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true after_success: - mvn clean test jacoco:report coveralls:report -dist: trusty +dist: bionic sudo: false cache: From 7f5c2170b2142b25cb43018317be02e6c8a0adfd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 15:45:32 +0100 Subject: [PATCH 0655/1255] Move modules into a profile to avoid cyclic dependencies `install-vlog-library.sh` calls `mvn initialize -Pdevelopment`, so we need to avoid having a module with a dependency on `vlog4j-core` in the reactor, since it has not been built yet. 
--- pom.xml | 45 +++-- vlog4j-core/LICENSE.txt | 402 ++++++++++++++++++++-------------------- 2 files changed, 232 insertions(+), 215 deletions(-) diff --git a/pom.xml b/pom.xml index bf1cdca27..05878fc60 100644 --- a/pom.xml +++ b/pom.xml @@ -14,20 +14,6 @@ A Java library for working with the VLog rule engine https://github.com/knowsys/vlog4j - - - vlog4j-core - vlog4j-rdf - vlog4j-examples - vlog4j-owlapi - vlog4j-graal - vlog4j-parser - vlog4j-client - coverage - - Apache License, Version 2.0 @@ -329,6 +315,37 @@ + + + client + + true + + + + vlog4j-core + vlog4j-rdf + vlog4j-examples + vlog4j-owlapi + vlog4j-graal + vlog4j-parser + vlog4j-client + coverage + + + + + development + + vlog4j-core + + sign diff --git a/vlog4j-core/LICENSE.txt b/vlog4j-core/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/vlog4j-core/LICENSE.txt +++ b/vlog4j-core/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. From 5ac1885fff58197310bf6a572656157892e7c4f6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:07:23 +0100 Subject: [PATCH 0656/1255] Force gcc-7 for builds on trusty --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 49a0fac52..b9435d927 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,6 +16,7 @@ matrix: - g++-7 jdk: - openjdk8 + env: CC=gcc-7 CXX=g++-7 - os: osx From 8baee23e8626fa50a597b545cb62828e8c3da2f3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:25:52 +0100 Subject: [PATCH 0657/1255] Disable jacoco in development profile so we don't toggle modules --- pom.xml | 71 +++++++++++++++++++++++++++++---------------------------- 1 file changed, 36 insertions(+), 35 deletions(-) diff --git a/pom.xml b/pom.xml index 05878fc60..aadb0e2d2 100644 --- a/pom.xml +++ b/pom.xml @@ -14,6 +14,20 @@ A Java library for working with the VLog rule engine https://github.com/knowsys/vlog4j + + + vlog4j-core + vlog4j-rdf + vlog4j-examples + vlog4j-owlapi + vlog4j-graal + vlog4j-parser + vlog4j-client + + + Apache License, Version 2.0 @@ -266,9 +280,9 @@ test + coveralls plugin will not try to aggregate + this into the final coverage report, since we + want to control aggregation ourselves. 
--> ${project.reporting.outputDirectory}/jacoco-ut @@ -289,7 +303,7 @@ + docs for upload to github: javadoc:aggregate --> org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} @@ -315,37 +329,6 @@ - - - client - - true - - - - vlog4j-core - vlog4j-rdf - vlog4j-examples - vlog4j-owlapi - vlog4j-graal - vlog4j-parser - vlog4j-client - coverage - - - - - development - - vlog4j-core - - sign @@ -405,6 +388,24 @@ + + development + + + + + org.jacoco + jacoco-maven-plugin + + + prepare-agent + none + + + + + + From bb98795f17d272f4a35c77f06603201b8d993344 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:27:45 +0100 Subject: [PATCH 0658/1255] Go back to released vlog4j-base --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b9435d927..bacd83438 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,6 +14,7 @@ matrix: - ubuntu-toolchain-r-test packages: - g++-7 + - libstdc++6 jdk: - openjdk8 env: CC=gcc-7 CXX=g++-7 @@ -22,8 +23,8 @@ matrix: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -before_install: - - sh ./build-vlog-library.sh +# before_install: +# - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 9fa84f92e0ea8e1e2a77ca9ab9327b48752fab62 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 16:50:35 +0100 Subject: [PATCH 0659/1255] Also build on xenial --- .travis.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index bacd83438..d6ce3d867 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,11 +13,14 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - g++-7 - - libstdc++6 + - gcc-5 + - g++-5 jdk: - openjdk8 - env: CC=gcc-7 CXX=g++-7 + env: CC=gcc-5 CXX=g++-5 + + - os: linux + dist: xenial - os: osx From 3f38a2c462e21f01b603e415c7fc492988f68470 Mon Sep 17 00:00:00 2001 From: Maximilian 
Marx Date: Mon, 2 Dec 2019 17:48:01 +0100 Subject: [PATCH 0660/1255] Use gcc-6 on trusty --- .travis.yml | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index d6ce3d867..1832e1701 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,14 +13,24 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-5 - - g++-5 + - gcc-6 + - g++-6 + - libstdc++-6 jdk: - openjdk8 - env: CC=gcc-5 CXX=g++-5 + env: CC=gcc-6 CXX=g++-6 - os: linux dist: xenial + addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - gcc-7 + - g++-7 + - libstdc++-7 + env: CC=gcc-7 CXX=g++-7 - os: osx From 36fa43c0b736cc176f05d03e8b1085e7d95a97b7 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 17:53:10 +0100 Subject: [PATCH 0661/1255] Fix package name for libstdc++ --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1832e1701..dcfeee2a7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ matrix: packages: - gcc-6 - g++-6 - - libstdc++-6 + - libstdc++6 jdk: - openjdk8 env: CC=gcc-6 CXX=g++-6 @@ -29,7 +29,7 @@ matrix: packages: - gcc-7 - g++-7 - - libstdc++-7 + - libstdc++6 env: CC=gcc-7 CXX=g++-7 - os: osx From 2d4c1ca3ee048511c0785d457ba329a4c2736a13 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 18:16:57 +0100 Subject: [PATCH 0662/1255] Use gcc-7 on trusty --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index dcfeee2a7..e82d92701 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,12 +13,12 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-6 - - g++-6 + - gcc-7 + - g++-7 - libstdc++6 jdk: - openjdk8 - env: CC=gcc-6 CXX=g++-6 + env: CC=gcc-7 CXX=g++-7 - os: linux dist: xenial From e4bac2b5284cc0faa14c3e63268e031805bafa14 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 18:26:59 +0100 Subject: [PATCH 0663/1255] Don't 
forget to include coverage module --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index aadb0e2d2..98b1b8ce1 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ vlog4j-graal vlog4j-parser vlog4j-client - + coverage From a123f9ca59e58f6a4abdefcbb7abd8347ca477ca Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Dec 2019 18:28:09 +0100 Subject: [PATCH 0664/1255] Allow trusty to fail --- .travis.yml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index e82d92701..ab63b97bf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,12 +13,12 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-7 - - g++-7 + - gcc-6 + - g++-6 - libstdc++6 jdk: - openjdk8 - env: CC=gcc-7 CXX=g++-7 + env: CC=gcc-6 CXX=g++-6 - os: linux dist: xenial @@ -35,6 +35,11 @@ matrix: - os: osx +jobs: + allow_failures: + - dist: trusty + + ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar # before_install: # - sh ./build-vlog-library.sh From 74b4783c8e6714c58421edf481c711928ac4cbc9 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 3 Dec 2019 13:10:42 +0100 Subject: [PATCH 0665/1255] Try gcc-6 on xenial --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index ab63b97bf..1b1b9a53f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,10 +27,10 @@ matrix: sources: - ubuntu-toolchain-r-test packages: - - gcc-7 - - g++-7 + - gcc-6 + - g++-6 - libstdc++6 - env: CC=gcc-7 CXX=g++-7 + env: CC=gcc-6 CXX=g++-6 - os: osx From af7b5414c7537c55ec6c24f5c542e08d37125e9c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 3 Dec 2019 16:44:21 +0100 Subject: [PATCH 0666/1255] fixes bug #139 + some refactoring --- .../core/model/implementation/Serializer.java | 252 ++++++++++++------ .../implementation/CsvFileDataSource.java | 3 +- 
.../SparqlQueryResultDataSource.java | 32 ++- .../core/model/DataSourceDeclarationTest.java | 81 ++++-- 4 files changed, 241 insertions(+), 127 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 122b9fc07..0f9778264 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.model.implementation; +import org.apache.commons.lang3.StringUtils; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -36,6 +37,10 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; /** * A utility class with static methods to obtain the correct parsable string @@ -55,18 +60,22 @@ public final class Serializer { public static final String CLOSING_PARENTHESIS = ")"; public static final String RULE_SEPARATOR = " :- "; public static final String AT = "@"; - public static final String SOURCE = "@source "; + public static final String DATA_SOURCE = "@source "; + public static final String CSV_FILE_DATA_SOURCE = "load-csv"; + private static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + private static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; public static final String COLON = ": "; public static final String COLON_UNSPACED = ":"; public static final String CARET = "^"; public 
static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; public static final String QUOTE = "\""; - public static final String DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String INTEGER = "^[-+]?\\d+$"; - public static final String DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String TRUE = "true"; - public static final String FALSE = "false"; + + public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String REGEX_INTEGER = "^[-+]?\\d+$"; + public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String REGEX_TRUE = "true"; + public static final String REGEX_FALSE = "false"; /** * Constructor. @@ -75,28 +84,12 @@ private Serializer() { } - private static String escape(String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\""); - } - - private static String checkRelativeAbsoluteIri(String string) { - if ((string.contains(COLON_UNSPACED) || string.matches(INTEGER) || string.matches(DOUBLE) - || string.matches(DECIMAL) || string.equals(TRUE) || string.equals(FALSE))) { - return LESS_THAN + string + MORE_THAN; - } else { - return string; - } - } - - private static String addQuote(String string) { - return QUOTE + string + QUOTE; - } - /** * Creates a String representation of a given {@link Rule}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param rule a {@link Rule}. + * @param rule + * a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * */ @@ -104,11 +97,34 @@ public static String getString(Rule rule) { return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + DOT; } + /** + * Creates a String representation of a given {@link Conjunction}. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
+ * @param conjunction + * a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. + */ + public static String getString(Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + stringBuilder.append(getString(literal)); + } + return stringBuilder.toString(); + } + /** * Creates a String representation of a given {@link Literal}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param literal a {@link Literal} + * @param literal + * a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ public static String getString(Literal literal) { @@ -116,7 +132,7 @@ public static String getString(Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(checkRelativeAbsoluteIri(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); + stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -124,7 +140,7 @@ public static String getString(Literal literal) { } else { stringBuilder.append(COMMA); } - String string = term.getSyntacticRepresentation(); + final String string = term.getSyntacticRepresentation(); stringBuilder.append(string); } stringBuilder.append(CLOSING_PARENTHESIS); @@ -135,7 +151,8 @@ public static String getString(Literal literal) { * Creates a String representation of a given {@link Fact}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param fact a {@link Fact} + * @param fact + * a {@link Fact} * @return String representation corresponding to a given {@link Fact}. 
*/ public static String getFactString(Fact fact) { @@ -146,18 +163,73 @@ public static String getFactString(Fact fact) { * Creates a String representation of a given {@link Constant}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param constant a {@link Constant} + * @param constant + * a {@link Constant} * @return String representation corresponding to a given {@link Constant}. */ public static String getString(AbstractConstant constant) { - return checkRelativeAbsoluteIri(constant.getName()); + return getIRIString(constant.getName()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param languageStringConstant + * a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(LanguageStringConstant languageStringConstant) { + return addQuotes(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} without an IRI. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param datatypeConstant + * a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. 
+ */ + public static String getString(DatatypeConstant datatypeConstant) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { + return addQuotes(datatypeConstant.getLexicalValue()); + } else { + if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); + } else { + return getConstantName(datatypeConstant); + } + } + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @param datatypeConstant + * a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(DatatypeConstant datatypeConstant) { + return addQuotes(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + + addAngleBrackets(datatypeConstant.getDatatype()); } /** * Creates a String representation of a given {@link ExistentialVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param existentialVariable a {@link ExistentialVariable} + * @param existentialVariable + * a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. */ @@ -169,7 +241,8 @@ public static String getString(ExistentialVariable existentialVariable) { * Creates a String representation of a given {@link UniversalVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param universalVariable a {@link UniversalVariable} + * @param universalVariable + * a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. 
*/ @@ -181,7 +254,8 @@ public static String getString(UniversalVariable universalVariable) { * Creates a String representation of a given {@link NamedNull}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param namedNull a {@link NamedNull} + * @param namedNull + * a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ public static String getString(NamedNull namedNull) { @@ -192,7 +266,8 @@ public static String getString(NamedNull namedNull) { * Creates a String representation of a given {@link Predicate}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param predicate a {@link Predicate} + * @param predicate + * a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(Predicate predicate) { @@ -203,86 +278,87 @@ public static String getString(Predicate predicate) { * Creates a String representation of a given {@link DataSourceDeclaration}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @param dataSourceDeclaration + * a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. */ public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS + return DATA_SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS + dataSourceDeclaration.getPredicate().getArity() + CLOSING_PARENTHESIS + COLON + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); } /** - * Creates a String representation of a given {@link Conjunction}. + * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
- * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. + * @see <"https://github.com/knowsys/vlog4j/wiki">. + * + * @param csvFileDataSource + * @return String representation corresponding to a given + * {@link CsvFileDataSource}. */ - public static String getString(Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); + public static String getString(CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS + + StringUtils.SPACE + DOT; } /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. + * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. + * @see <"https://github.com/knowsys/vlog4j/wiki">. + * + * + * @param rdfFileDataSource + * @return String representation corresponding to a given + * {@link RdfFileDataSource}. 
*/ - public static String getConstantName(LanguageStringConstant languageStringConstant) { - return addQuote(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); + public static String getString(RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS + + StringUtils.SPACE + DOT; } /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} without an IRI. + * Creates a String representation of a given + * {@link SparqlQueryResultDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param datatypeConstant a {@link DatatypeConstant} + * @see <"https://github.com/knowsys/vlog4j/wiki">. + * + * + * @param dataSource * @return String representation corresponding to a given - * {@link DatatypeConstant}. + * {@link SparqlQueryResultDataSource}. */ - public static String getString(DatatypeConstant datatypeConstant) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return addQuote(datatypeConstant.getLexicalValue()); - } else { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } else { - return getConstantName(datatypeConstant); - } + public static String getString(SparqlQueryResultDataSource dataSource) { + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPEN_PARENTHESIS + + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + + CLOSING_PARENTHESIS + StringUtils.SPACE + DOT; + } + + private static String getFileString(FileDataSource fileDataSource) { + return 
addQuotes(fileDataSource.getFile().toString()); + } + private static String getIRIString(String string) { + if (string.contains(COLON_UNSPACED) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { + return addAngleBrackets(string); + } else { + return string; } } - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(DatatypeConstant datatypeConstant) { - return addQuote(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + LESS_THAN - + datatypeConstant.getDatatype() + MORE_THAN; + private static String escape(String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\""); + } + + private static String addQuotes(String string) { + return QUOTE + string + QUOTE; + } + + private static String addAngleBrackets(String string) { + return LESS_THAN + string + MORE_THAN; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java index 2fa42eb07..7998dd466 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java @@ -25,6 +25,7 @@ import java.util.Arrays; import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of @@ -75,7 +76,7 @@ public String toString() { 
@Override public String getSyntacticRepresentation() { - return "load-csv(\"" + getFile() + "\") ."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index ace59318b..8eb8168b6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -27,6 +27,7 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a @@ -96,15 +97,15 @@ public SparqlQueryResultDataSource(@NonNull final URL endpoint, } public URL getEndpoint() { - return endpoint; + return this.endpoint; } public String getQueryBody() { - return queryBody; + return this.queryBody; } public String getQueryVariables() { - return queryVariables; + return this.queryVariables; } @Override @@ -115,9 +116,9 @@ public final String toConfigString() { DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + endpoint + "\n" + "EDB%1$d_param1=" + queryVariables + "\n" + + "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + - "EDB%1$d_param2=" + queryBody + "\n"; + "EDB%1$d_param2=" + this.queryBody + "\n"; return configStringPattern; } @@ -138,20 +139,23 @@ static String getQueryVariablesList(LinkedHashSet queryVariables) { public int hashCode() { final int prime = 31; int result = 1; - result = prime * result + endpoint.hashCode(); - result = prime * result + queryBody.hashCode(); - result = prime * result + 
queryVariables.hashCode(); + result = prime * result + this.endpoint.hashCode(); + result = prime * result + this.queryBody.hashCode(); + result = prime * result + this.queryVariables.hashCode(); return result; } @Override public boolean equals(Object obj) { - if (this == obj) + if (this == obj) { return true; - if (obj == null) + } + if (obj == null) { return false; - if (getClass() != obj.getClass()) + } + if (getClass() != obj.getClass()) { return false; + } final SparqlQueryResultDataSource other = (SparqlQueryResultDataSource) obj; return this.endpoint.equals(other.getEndpoint()) && this.queryVariables.equals(other.getQueryVariables()) && this.queryBody.equals(other.getQueryBody()); @@ -159,13 +163,13 @@ public boolean equals(Object obj) { @Override public String toString() { - return "SparqlQueryResultDataSource [endpoint=" + endpoint + ", queryVariables=" + queryVariables - + ", queryBody=" + queryBody + "]"; + return "SparqlQueryResultDataSource [endpoint=" + this.endpoint + ", queryVariables=" + this.queryVariables + + ", queryBody=" + this.queryBody + "]"; } @Override public String getSyntacticRepresentation() { - return "sparql(" + "<" + endpoint + ">" + ", \"" + queryVariables + "\"" + ", \"" + queryBody + "\") ."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 9ef7ef77d..5f6df4244 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -28,6 +28,7 @@ import java.net.MalformedURLException; import java.net.URL; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; @@ -35,29 +36,28 @@ import 
org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; public class DataSourceDeclarationTest { @Test - public void equalityTest() throws MalformedURLException { - DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + public void testEquality() throws MalformedURLException { + final DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", "?var wdt:P31 wd:Q5 ."); - Predicate predicate1 = Expressions.makePredicate("p", 3); - DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + final Predicate predicate1 = Expressions.makePredicate("p", 3); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); + final DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", "?var wdt:P31 wd:Q5 ."); - Predicate predicate2 = Expressions.makePredicate("p", 3); - DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); + final Predicate predicate2 = Expressions.makePredicate("p", 3); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); - DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", + final DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", "?var2 
wdt:P31 wd:Q5 ."); - DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); - Predicate predicate4 = Expressions.makePredicate("q", 1); - DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); + final Predicate predicate4 = Expressions.makePredicate("q", 1); + final DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); assertEquals(dataSourceDeclaration1, dataSourceDeclaration1); assertEquals(dataSourceDeclaration1, dataSourceDeclaration2); @@ -69,24 +69,57 @@ public void equalityTest() throws MalformedURLException { } @Test - public void DataSourceDeclarationToStringTest() throws IOException { - final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; - final File unzippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); - Predicate predicate1 = Expressions.makePredicate("p", 3); - Predicate predicate2 = Expressions.makePredicate("q", 1); + public void toString_SparqlQueryResultDataSource() throws IOException { + final Predicate predicate = Expressions.makePredicate("p", 3); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource( new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); - final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + assertEquals("@source p(3): sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", + dataSourceDeclaration.toString()); + + } + + @Test + public void 
toString_CsvFileDataSource() throws IOException { + final Predicate predicate2 = Expressions.makePredicate("q", 1); + final String relativeDirName = "dir"; + final String fileName = "file.csv"; + + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(relativeDirName, fileName)); final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); + + final String expectedFilePath = relativeDirName + File.separator + fileName; + assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration2.toString()); + } + + // TODO: have String representation of files OS independent + @Ignore + @Test + public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { + final Predicate predicate2 = Expressions.makePredicate("q", 1); + // "D:\\VLOG\\java-api-applications\\vlog4j\\vlog4j\\vlog4j-core\\src\\test\\data\input\\file.csv"; + final String absoluteFilePathWindows = "D:\\input\\file.csv"; + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + unzippedCsvFileDataSource); + assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", + dataSourceDeclaration2.toString()); + } + + @Test + public void toString_RdfFileDataSource_relativePath() throws IOException { + final Predicate predicate2 = Expressions.makePredicate("q", 1); + final String relativeDirName = "dir"; + final String fileName = "file.nt"; + final File unzippedRdfFile = new File(relativeDirName, fileName); + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, unzippedRdfFileDataSource); - assertEquals("@source p(3): sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", - 
dataSourceDeclaration1.toString()); - assertEquals("@source q(1): load-csv(\"src/test/data/input/file.csv\") .", dataSourceDeclaration2.toString()); - assertEquals("@source q(1): load-rdf(\"src/test/data/input/file.nt\") .", dataSourceDeclaration3.toString()); + final String expectedFilePath = relativeDirName + File.separator + fileName; + assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", + dataSourceDeclaration3.toString()); } } From 94cac64e7c45a12455182fedd253642600694d1d Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 3 Dec 2019 22:49:53 +0100 Subject: [PATCH 0667/1255] Fix statement Entity syntactic representation to end with " ." --- .../core/model/implementation/Serializer.java | 113 ++++++++---------- .../implementation/InMemoryDataSource.java | 8 +- .../implementation/RdfFileDataSource.java | 5 +- .../core/model/DataSourceDeclarationTest.java | 21 ++-- .../vlog4j/core/model/FactTest.java | 18 +-- .../vlog4j/core/model/RuleImplTest.java | 6 +- 6 files changed, 76 insertions(+), 95 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 0f9778264..ce1d58990 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,6 +1,5 @@ package org.semanticweb.vlog4j.core.model.implementation; -import org.apache.commons.lang3.StringUtils; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -50,9 +49,9 @@ * */ public final class Serializer { - public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String STATEMENT_SEPARATOR = " ."; public static final String COMMA = ", "; - public static final String DOT = "."; + public static final String NEGATIVE_IDENTIFIER = "~"; public static final String 
EXISTENTIAL_IDENTIFIER = "!"; public static final String UNIVERSAL_IDENTIFIER = "?"; public static final String NAMEDNULL_IDENTIFIER = "_"; @@ -64,9 +63,9 @@ public final class Serializer { public static final String CSV_FILE_DATA_SOURCE = "load-csv"; private static final String RDF_FILE_DATA_SOURCE = "load-rdf"; private static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String COLON = ": "; - public static final String COLON_UNSPACED = ":"; - public static final String CARET = "^"; + public static final String DATA_SOURCE_SEPARATOR = ": "; + public static final String COLON = ":"; + public static final String DOUBLE_CARET = "^^"; public static final String LESS_THAN = "<"; public static final String MORE_THAN = ">"; public static final String QUOTE = "\""; @@ -88,24 +87,22 @@ private Serializer() { * Creates a String representation of a given {@link Rule}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param rule - * a {@link Rule}. + * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * */ - public static String getString(Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + DOT; + public static String getString(final Rule rule) { + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; } /** * Creates a String representation of a given {@link Conjunction}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param conjunction - * a {@link Conjunction} + * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. 
*/ - public static String getString(Conjunction conjunction) { + public static String getString(final Conjunction conjunction) { final StringBuilder stringBuilder = new StringBuilder(); boolean first = true; for (final Literal literal : conjunction.getLiterals()) { @@ -123,11 +120,10 @@ public static String getString(Conjunction conjunction) { * Creates a String representation of a given {@link Literal}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param literal - * a {@link Literal} + * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ - public static String getString(Literal literal) { + public static String getString(final Literal literal) { final StringBuilder stringBuilder = new StringBuilder(""); if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); @@ -151,23 +147,21 @@ public static String getString(Literal literal) { * Creates a String representation of a given {@link Fact}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param fact - * a {@link Fact} + * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ - public static String getFactString(Fact fact) { - return getString(fact) + DOT; + public static String getFactString(final Fact fact) { + return getString(fact) + STATEMENT_SEPARATOR; } /** * Creates a String representation of a given {@link Constant}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param constant - * a {@link Constant} + * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. 
*/ - public static String getString(AbstractConstant constant) { + public static String getString(final AbstractConstant constant) { return getIRIString(constant.getName()); } @@ -176,12 +170,11 @@ public static String getString(AbstractConstant constant) { * {@link LanguageStringConstant}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param languageStringConstant - * a {@link LanguageStringConstant} + * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. */ - public static String getConstantName(LanguageStringConstant languageStringConstant) { + public static String getConstantName(final LanguageStringConstant languageStringConstant) { return addQuotes(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); } @@ -190,12 +183,11 @@ public static String getConstantName(LanguageStringConstant languageStringConsta * {@link DatatypeConstant} without an IRI. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param datatypeConstant - * a {@link DatatypeConstant} + * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getString(DatatypeConstant datatypeConstant) { + public static String getString(final DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { return addQuotes(datatypeConstant.getLexicalValue()); } else { @@ -214,13 +206,12 @@ public static String getString(DatatypeConstant datatypeConstant) { * {@link DatatypeConstant} including an IRI. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
- * @param datatypeConstant - * a {@link DatatypeConstant} + * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. */ - public static String getConstantName(DatatypeConstant datatypeConstant) { - return addQuotes(escape(datatypeConstant.getLexicalValue())) + CARET + CARET + public static String getConstantName(final DatatypeConstant datatypeConstant) { + return addQuotes(escape(datatypeConstant.getLexicalValue())) + DOUBLE_CARET + addAngleBrackets(datatypeConstant.getDatatype()); } @@ -228,12 +219,11 @@ public static String getConstantName(DatatypeConstant datatypeConstant) { * Creates a String representation of a given {@link ExistentialVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param existentialVariable - * a {@link ExistentialVariable} + * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. */ - public static String getString(ExistentialVariable existentialVariable) { + public static String getString(final ExistentialVariable existentialVariable) { return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); } @@ -241,12 +231,11 @@ public static String getString(ExistentialVariable existentialVariable) { * Creates a String representation of a given {@link UniversalVariable}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param universalVariable - * a {@link UniversalVariable} + * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. 
*/ - public static String getString(UniversalVariable universalVariable) { + public static String getString(final UniversalVariable universalVariable) { return UNIVERSAL_IDENTIFIER + universalVariable.getName(); } @@ -254,11 +243,10 @@ public static String getString(UniversalVariable universalVariable) { * Creates a String representation of a given {@link NamedNull}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param namedNull - * a {@link NamedNull} + * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ - public static String getString(NamedNull namedNull) { + public static String getString(final NamedNull namedNull) { return NAMEDNULL_IDENTIFIER + namedNull.getName(); } @@ -266,11 +254,10 @@ public static String getString(NamedNull namedNull) { * Creates a String representation of a given {@link Predicate}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param predicate - * a {@link Predicate} + * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ - public static String getString(Predicate predicate) { + public static String getString(final Predicate predicate) { return predicate.getName() + OPEN_PARENTHESIS + predicate.getArity() + CLOSING_PARENTHESIS; } @@ -278,15 +265,13 @@ public static String getString(Predicate predicate) { * Creates a String representation of a given {@link DataSourceDeclaration}. * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. - * @param dataSourceDeclaration - * a {@link DataSourceDeclaration} + * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. 
*/ - public static String getString(DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + dataSourceDeclaration.getPredicate().getName() + OPEN_PARENTHESIS - + dataSourceDeclaration.getPredicate().getArity() + CLOSING_PARENTHESIS + COLON - + dataSourceDeclaration.getDataSource().getSyntacticRepresentation(); + public static String getString(final DataSourceDeclaration dataSourceDeclaration) { + return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR + + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; } /** @@ -298,9 +283,8 @@ public static String getString(DataSourceDeclaration dataSourceDeclaration) { * @return String representation corresponding to a given * {@link CsvFileDataSource}. */ - public static String getString(CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS - + StringUtils.SPACE + DOT; + public static String getString(final CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; } /** @@ -313,9 +297,8 @@ public static String getString(CsvFileDataSource csvFileDataSource) { * @return String representation corresponding to a given * {@link RdfFileDataSource}. */ - public static String getString(RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS - + StringUtils.SPACE + DOT; + public static String getString(final RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; } /** @@ -329,19 +312,19 @@ public static String getString(RdfFileDataSource rdfFileDataSource) { * @return String representation corresponding to a given * {@link SparqlQueryResultDataSource}. 
*/ - public static String getString(SparqlQueryResultDataSource dataSource) { + public static String getString(final SparqlQueryResultDataSource dataSource) { return SPARQL_QUERY_RESULT_DATA_SOURCE + OPEN_PARENTHESIS + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS + StringUtils.SPACE + DOT; + + CLOSING_PARENTHESIS; } - private static String getFileString(FileDataSource fileDataSource) { + private static String getFileString(final FileDataSource fileDataSource) { return addQuotes(fileDataSource.getFile().toString()); } - private static String getIRIString(String string) { - if (string.contains(COLON_UNSPACED) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + private static String getIRIString(final String string) { + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { return addAngleBrackets(string); } else { @@ -349,15 +332,15 @@ private static String getIRIString(String string) { } } - private static String escape(String string) { + private static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\""); } - private static String addQuotes(String string) { + private static String addQuotes(final String string) { return QUOTE + string + QUOTE; } - private static String addAngleBrackets(String string) { + private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 3bd708525..e498cacf2 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -94,15 +94,15 @@ public String[][] getData() { @Override public String getSyntacticRepresentation() { - StringBuilder facts = new StringBuilder( + StringBuilder sb = new StringBuilder( "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < this.getData().length; i++) { for (int j = 0; j < data[i].length; j++) { - facts.append(data[i][j] + " "); + sb.append(data[i][j] + " "); } - facts.append("\n"); + sb.append("\n"); } - return facts.toString(); + return sb.toString(); } /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java index ee5cc49ee..e65515dc7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java @@ -25,6 +25,7 @@ import java.util.Arrays; import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside @@ -68,12 +69,12 @@ public RdfFileDataSource(@NonNull final File rdfFile) throws IOException { @Override public String toString() { - return "RdfFileDataSource [rdfFile=" + getFile() + "]"; + return "RdfFileDataSource [rdfFile=" + this.getFile() + "]"; } @Override public String getSyntacticRepresentation() { - return "load-rdf(\"" + getFile() + "\") ."; + return Serializer.getString(this); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index 5f6df4244..caf805b82 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -87,39 +87,36 @@ public void toString_CsvFileDataSource() throws IOException { final String fileName = "file.csv"; final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(relativeDirName, fileName)); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration2.toString()); + assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } - // TODO: have String representation of files OS independent + // FIXME: have String representation of files OS independent @Ignore @Test public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { - final Predicate predicate2 = Expressions.makePredicate("q", 1); - // "D:\\VLOG\\java-api-applications\\vlog4j\\vlog4j\\vlog4j-core\\src\\test\\data\input\\file.csv"; + final Predicate predicate = Expressions.makePredicate("q", 1); final String absoluteFilePathWindows = "D:\\input\\file.csv"; final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); - assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", - 
dataSourceDeclaration2.toString()); + assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); } @Test public void toString_RdfFileDataSource_relativePath() throws IOException { - final Predicate predicate2 = Expressions.makePredicate("q", 1); + final Predicate predicate = Expressions.makePredicate("q", 1); final String relativeDirName = "dir"; final String fileName = "file.nt"; final File unzippedRdfFile = new File(relativeDirName, fileName); final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", - dataSourceDeclaration3.toString()); + assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java index a94cdf86d..0de3182d3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java @@ -35,12 +35,12 @@ public class FactTest { @Test public void factsConstructor() { - Predicate p = Expressions.makePredicate("p", 2); - Constant c = Expressions.makeAbstractConstant("c"); - Constant d = Expressions.makeAbstractConstant("d"); - Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); - Fact f2 = Expressions.makeFact("p", Arrays.asList(c, d)); - Fact f3 = new FactImpl(p, Arrays.asList(c, d)); + final Predicate p = Expressions.makePredicate("p", 2); + final Constant c = Expressions.makeAbstractConstant("c"); + final 
Constant d = Expressions.makeAbstractConstant("d"); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); + final Fact f2 = Expressions.makeFact("p", Arrays.asList(c, d)); + final Fact f3 = new FactImpl(p, Arrays.asList(c, d)); assertEquals(f1, f2); assertEquals(f1, f3); assertEquals(f2, f3); @@ -48,8 +48,8 @@ public void factsConstructor() { @Test(expected = IllegalArgumentException.class) public void factsOnlyContainConstants() { - Predicate p = Expressions.makePredicate("p", 1); - Variable x = Expressions.makeUniversalVariable("X"); + final Predicate p = Expressions.makePredicate("p", 1); + final Variable x = Expressions.makeUniversalVariable("X"); new FactImpl(p, Arrays.asList(x)); } @@ -59,7 +59,7 @@ public void factToStringTest() { final Constant c = Expressions.makeAbstractConstant("c"); final Constant d = Expressions.makeAbstractConstant("d"); final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); - assertEquals("p(c, d).", f1.toString()); + assertEquals("p(c, d) .", f1.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java index 461a439f1..0a406ec18 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java @@ -152,7 +152,7 @@ public void ruleToStringTest() { final Variable y2 = Expressions.makeUniversalVariable("Y"); final Constant d = Expressions.makeAbstractConstant("d"); final Constant c = Expressions.makeAbstractConstant("c"); - LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); + final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z); final PositiveLiteral headAtom1 = 
Expressions.makePositiveLiteral("q", x, y); @@ -168,8 +168,8 @@ public void ruleToStringTest() { final Conjunction bodyConjunction = new ConjunctionImpl<>(LiteralList); final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); - assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z).", rule1.toString()); - assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en).", rule2.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z) .", rule1.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en) .", rule2.toString()); } From 507d19475e4ccacab180be660dab9ab33c6595dd Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Tue, 3 Dec 2019 23:22:48 +0100 Subject: [PATCH 0668/1255] change vlog4j-base dependency version to release 1.3.2 --- vlog4j-core/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index 20e9568cb..a1c646e97 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -17,7 +17,7 @@ Core components of VLog4j: reasoner and model - 1.3.1 + 1.3.2 From ee7e9f86814465a6381160e48f133f4045b5ad37 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 11:39:30 +0100 Subject: [PATCH 0669/1255] Don't build on trusty --- .travis.yml | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1b1b9a53f..5921162f9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,20 +6,6 @@ matrix: jdk: - openjdk11 - - os: linux - dist: trusty - addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-6 - - g++-6 - - libstdc++6 - jdk: - - openjdk8 - env: CC=gcc-6 CXX=g++-6 - - os: linux dist: xenial addons: @@ -49,7 +35,6 @@ install: mvn install $OPTIONS -DskipTests=true after_success: - mvn clean test jacoco:report coveralls:report -dist: bionic sudo: false cache: From 
efd048296b468c715d0cb2c03319d6dbfd8cbc9c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 11:40:17 +0100 Subject: [PATCH 0670/1255] Bump macOS to 10.14 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 5921162f9..fde2269c2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,6 +19,7 @@ matrix: env: CC=gcc-6 CXX=g++-6 - os: osx + osx_image: xcode10.2 jobs: From a0ad8f25a3348e510ff1db70a261d4b87d77e205 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 4 Dec 2019 14:43:04 +0100 Subject: [PATCH 0671/1255] Update RELEASE-NOTES before release --- RELEASE-NOTES.md | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 049a8e72e..0819972b3 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -8,18 +8,24 @@ Breaking changes: * The data model for rules has been refined and changed: * Instead of Constant, specific types of constants are used to capture abtract and data values * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification - * Bland was renamed to NamedNull to avoid confusion with RDF blank nodes + * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes * Methods to access terms now use Java Streams and are unified across syntactic objects New features: -* ... 
+* New module vlog4j-client provides a stand-alone command line client jar for VLog4j +* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki Other improvements: * Data model is better aligned with syntax supported by parser +* Java object Statements (rules, facts, datasource declarations) String representation is parseable +* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) +* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) +* Cobertura test coverage tool has been replaced by JaCoCo Bugfixes: * Acyclicity checks work again without calling reason() first (issue #128) - +* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) +* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) VLog4j v0.4.0 ------------- From 15e16140fb3d4d8f0e178df209da5e5ba231b3b4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 4 Dec 2019 14:55:47 +0100 Subject: [PATCH 0672/1255] Update README * vlog4j-client module * link to wiki --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index a52fb12f7..11f0be918 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,7 @@ You need to use Java 1.8 or above. 
Available modules include: * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API +* **vlog4j-client** stand-alone application that builds a command-line client for VLog4j The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows: @@ -38,6 +39,7 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases * The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. +* VLog4j[Wiki](https://github.com/knowsys/vlog4j/wiki) is available online Development ----------- From 990f2fed89a42076b99ecea3d095c8c8412c9b76 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 4 Dec 2019 14:56:57 +0100 Subject: [PATCH 0673/1255] Update README --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 11f0be918..943612a18 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ You need to use Java 1.8 or above. 
Available modules include: * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API -* **vlog4j-client** stand-alone application that builds a command-line client for VLog4j +* **vlog4j-client**: stand-alone application that builds a command-line client for VLog4j The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows: @@ -39,7 +39,7 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases * The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* VLog4j[Wiki](https://github.com/knowsys/vlog4j/wiki) is available online +* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online. Development ----------- From ed11e8c0dfaee02b978b30dece51dc04ed235abc Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 4 Dec 2019 15:09:09 +0100 Subject: [PATCH 0674/1255] update README with description about wiki --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 943612a18..19d2290a2 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ You need to use Java 1.8 or above. 
Available modules include: * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API -* **vlog4j-client**: stand-alone application that builds a command-line client for VLog4j +* **vlog4j-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/vlog4j/wiki/Standalone-client) for VLog4j. The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows: @@ -39,7 +39,7 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases * The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online. +* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language, and related publications. 
Development ----------- From bce7689ea8743bbb82f808d3f7d809b802193839 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 1 Oct 2019 17:46:23 +0200 Subject: [PATCH 0675/1255] Parser: add generated source files to project This fixes, among other things, autocompletion and building the project using the eclipse LSP server. --- vlog4j-parser/pom.xml | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index fa5b82280..6e3773633 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -44,6 +44,24 @@ + + org.codehaus.mojo + build-helper-maven-plugin + 1.8 + + + generate-sources + + + ${project.build.directory}/generated-sources/javacc/ + + + + add-source + + + + @@ -69,6 +87,27 @@ + + + org.codehaus.mojo + build-helper-maven-plugin + [1.0,) + + parse-version + add-source + maven-version + add-resource + add-test-resource + add-test-source + + + + + true + true + + + From 5b866be608eba641177ee6f02368e3dccef6639f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 2 Oct 2019 14:57:57 +0200 Subject: [PATCH 0676/1255] Parser: split out tests relating to data sources --- .../parser/RuleParserDataSourceTest.java | 87 +++++++++++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 57 +----------- 2 files changed, 89 insertions(+), 55 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java new file mode 100644 index 000000000..22700a0b0 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -0,0 +1,87 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + 
* %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; + +public class RuleParserDataSourceTest { + @Test + public void testCsvSource() throws ParsingException, IOException { + String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); + Predicate p = Expressions.makePredicate("p", 2); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, 
csvds); + assertEquals(Arrays.asList(d), statements); + } + + @Test + public void testRdfSource() throws ParsingException, IOException { + String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); + Predicate p = Expressions.makePredicate("p", 3); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); + assertEquals(Arrays.asList(d), statements); + } + + @Test(expected = ParsingException.class) + public void testRdfSourceInvalidArity() throws ParsingException, IOException { + String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + RuleParser.parse(input); + } + + @Test + public void testSparqlSource() throws ParsingException, MalformedURLException { + String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( + new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); + Predicate p = Expressions.makePredicate("p", 2); + DataSourceDeclaration d = new DataSourceDeclarationImpl(p, sparqlds); + assertEquals(Arrays.asList(d), statements); + } + + @Test(expected = ParsingException.class) + public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { + String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parse(input); + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8ba1fb964..6abc8647a 100644 --- 
a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,31 +21,21 @@ */ import static org.junit.Assert.assertEquals; -import java.io.File; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import 
org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -351,49 +341,6 @@ public void tesLiteralError() throws ParsingException { RuleParser.parseLiteral(input); } - @Test - public void testCsvSource() throws ParsingException, IOException { - String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); - Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, csvds); - assertEquals(Arrays.asList(d), statements); - } - - @Test - public void testRdfSource() throws ParsingException, IOException { - String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); - Predicate p = Expressions.makePredicate("p", 3); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); - assertEquals(Arrays.asList(d), statements); - } - - @Test(expected = ParsingException.class) - public void testRdfSourceInvalidArity() throws ParsingException, IOException { - String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RuleParser.parse(input); - } - - @Test - public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( - new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); 
- Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, sparqlds); - assertEquals(Arrays.asList(d), statements); - } - - @Test(expected = ParsingException.class) - public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - RuleParser.parse(input); - } - @Test(expected = ParsingException.class) public void testBlankPrefixDeclaration() throws ParsingException { String input = "@prefix _: . s(c) ."; From 1e6e3dbe25c369587b8827f72da7e405aa58ed22 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 4 Oct 2019 00:26:52 +0200 Subject: [PATCH 0677/1255] Parser: allow registration of custom data sources --- .../parser/DataSourceDeclarationHandler.java | 43 ++++++++++ .../vlog4j/parser/ParserConfiguration.java | 82 +++++++++++++++++++ .../semanticweb/vlog4j/parser/RuleParser.java | 31 +++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 44 +++++++--- .../parser/javacc/JavaCCParserBase.java | 38 +++++++-- .../parser/RuleParserDataSourceTest.java | 27 ++++++ 6 files changed, 249 insertions(+), 16 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java new file mode 100644 index 000000000..70a02b8de --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -0,0 +1,43 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * 
you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + + +import org.semanticweb.vlog4j.core.model.api.DataSource; + +/** + * Handler for parsing a custom Data Source declaration. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface DataSourceDeclarationHandler { + /** + * Parse a Data Source Declaration. + * + * @param arguments + * Arguments given to the Data Source declaration. + * + * @throws ParsingException when the given arguments are invalid for the Data Source. + * @return DataSource a DataSource instance. + */ + DataSource handleDeclaration(String[] arguments) throws ParsingException; +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java new file mode 100644 index 000000000..2b2dbb66f --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -0,0 +1,82 @@ +package org.semanticweb.vlog4j.parser; + +import java.util.HashMap; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.parser.javacc.ParseException; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Class to keep parser configuration. + * + * @author Maximilian Marx + */ +public class ParserConfiguration { + /** + * Register a new Data Source. + * + * @param name + * Name of the data source, as it appears in the declaring directive. + * + * @param handler + * Handler for parsing a data source declaration. + * + * @throws IllegalArgumentException if the provided name is already registered. + * @return this + */ + public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) throws IllegalArgumentException { + if (dataSources.containsKey(name)) { + throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); + } + + this.dataSources.put(name, handler); + return this; + } + + /** + * Parse a Data Source declaration. + * + * @param name + * Name of the data source. + * + * @param args + * arguments given in the data source declaration. + * + * @throws ParsingException when the declaration is invalid, e.g., if the Data Source is not known. + * + * @return the Data Source instance. + */ + public DataSource parseDataSourceDeclaration(String name, String[] args) throws ParsingException { + DataSourceDeclarationHandler handler = dataSources.get(name); + + if (handler == null) { + throw new ParsingException("Data source \"" + name + "\" is not known."); + } + + return handler.handleDeclaration(args); + } + + /** + * The registered data sources. 
+ */ + HashMap dataSources = new HashMap<>(); +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index ffd6ec8cf..129537296 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -46,6 +46,22 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { + final JavaCCParser parser = new JavaCCParser(stream, encoding); + parser.setKnowledgeBase(knowledgeBase); + parser.setParserConfiguration(parserConfiguration); + doParse(parser); + } + + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { + parseInto(knowledgeBase, stream, "UTF-8", parserConfiguration); + } + + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + parseInto(knowledgeBase, inputStream, "UTF-8", parserConfiguration); + } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding) throws ParsingException { final JavaCCParser javaCcParser = new JavaCCParser(stream, encoding); @@ -62,6 +78,21 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final String inp parseInto(knowledgeBase, inputStream, "UTF-8"); } + public static KnowledgeBase parse(final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { + JavaCCParser parser = new 
JavaCCParser(stream, encoding); + parser.setParserConfiguration(parserConfiguration); + return doParse(parser); + } + + public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { + return parse(stream, "UTF-8", parserConfiguration); + } + + public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + return parse(inputStream, "UTF-8", parserConfiguration); + } + public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { return doParse(new JavaCCParser(stream, encoding)); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 9c3785d20..e58c47b5b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -21,6 +21,7 @@ import java.net.MalformedURLException; import java.util.List; import java.util.ArrayList; +import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -109,11 +110,13 @@ DataSource dataSource() throws PrefixDeclarationException: String endpoint; String variables; String query; + Token sourceName; + String[] arguments; } { < LOADCSV > < LPAREN > fileName = String() < RPAREN > { - try { + try { return new CsvFileDataSource(new File(fileName)) ; } catch (IOException e) { throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); @@ -134,7 +137,15 @@ DataSource dataSource() throws PrefixDeclarationException: } catch (MalformedURLException e) { throw new 
ParseException("SPARQL endoint \"" + endpoint +"\" is not a valid URL: " + e.getMessage()); } - } + } +| sourceName = < DIRECTIVENAME > < LPAREN > arguments = Strings() < RPAREN > + { + try { + return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments); + } catch (ParsingException e) { + throw new ParseException(e.getMessage()); + } + } } @@ -161,13 +172,13 @@ Rule rule() throws PrefixDeclarationException: { // check that the intersection between headExiVars and BodyVars is empty for (String variable : headExiVars) { - if (bodyVars.contains(variable)) + if (bodyVars.contains(variable)) throw new ParseException("Malformed rule " + head + " :- " + body + "\nExistential variable " + variable + " also used in rule body."); } // check that bodyVars contains headUniVars for (String variable : headUniVars) { - if (!bodyVars.contains(variable)) + if (!bodyVars.contains(variable)) throw new ParseException("Unsafe rule " + head + " :- " + body + "\nUniversal variable " + variable + " occurs in head but not in body."); } @@ -226,7 +237,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: { predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { - try { + try { return Expressions.makeFact(predicateName, terms); } catch (IllegalArgumentException e) { throw new ParseException("Error parsing fact: " + e.toString()); @@ -352,6 +363,18 @@ String String(): } } +String[] Strings(): +{ + String str; + String[] rest = {}; +} +{ + str = String() [< COMMA > rest = Strings()] + { + return collectStrings(str, rest); + } +} + String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: { String iri; @@ -428,7 +451,7 @@ TOKEN: ( (< DIGITS >)+ "." (< DIGITS >)* | "." (< DIGITS >)+ - ) + ) > | < DOUBLE : ([ "+", "-" ])? @@ -436,7 +459,7 @@ TOKEN: ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > | "." 
([ "0"-"9" ])+ (< EXPONENT >) | ([ "0"-"9" ])+ < EXPONENT > - ) + ) > | < #DIGITS : ([ "0"-"9" ])+ > | < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > @@ -508,6 +531,8 @@ TOKEN : | < VARORPREDNAME : < A2Z> (< A2ZN >)* > | < #A2Z : [ "a"-"z", "A"-"Z" ] > | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > +| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > +| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > } TOKEN : @@ -539,10 +564,10 @@ TOKEN : | [ "\u2070"-"\u218f" ] | [ "\u2c00"-"\u2fef" ] | [ "\u3001"-"\ud7ff" ] - | [ "\uf900"-"\ufffd" ] + | [ "\uf900"-"\ufffd" ] > // | [ ""#x10000-#xEFFFF] -| +| < #PN_CHARS_U : < PN_CHARS_BASE > | "_" > @@ -579,4 +604,3 @@ TOKEN : < PN_CHARS > )? > } - diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index cfcfcd96a..7f0929e5f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.parser.javacc; +import java.util.ArrayList; + /*- * #%L * vlog4j-parser @@ -9,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,6 +23,7 @@ */ import java.util.HashSet; +import java.util.List; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; @@ -31,17 +34,18 @@ import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.core.model.api.Predicate; /** * Basic methods used in the JavaCC-generated parser. - * + * * Implementation of some string escaping methods adapted from Apache Jena, * released under Apache 2.0 license terms. - * + * * @see https://github.com/apache/jena/blob/master/jena-core/src/main/java/org/apache/jena/n3/turtle/ParserBase.java - * + * * @author Markus Kroetzsch * @author Larry Gonzalez * @author Jena developers, Apache Software Foundation (ASF) @@ -51,6 +55,7 @@ public class JavaCCParserBase { final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); KnowledgeBase knowledgeBase; + ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. @@ -67,7 +72,7 @@ public class JavaCCParserBase { /** * Defines the context for parsing sub-formulas. 
- * + * * @author Markus Kroetzsch * */ @@ -86,6 +91,7 @@ public enum FormulaContext { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); + this.parserConfiguration = new ParserConfiguration(); } Constant createIntegerConstant(String lexicalForm) { @@ -105,6 +111,17 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) { knowledgeBase.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } + static String[] collectStrings(String str, String[] rest) { + ArrayList strings = new ArrayList<>(); + strings.add(str); + + for (String next : rest) { + strings.add(next); + } + + return strings.toArray(rest); + } + static String unescapeStr(String s, int line, int column) throws ParseException { return unescape(s, '\\', false, line, column); } @@ -230,4 +247,13 @@ public KnowledgeBase getKnowledgeBase() { return knowledgeBase; } + public void setParserConfiguration(ParserConfiguration parserConfiguration) { + this.parserConfiguration = parserConfiguration; + } + + public ParserConfiguration getParserConfiguration() { + return parserConfiguration; + } + + } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 22700a0b0..54f17c95a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -21,6 +21,7 @@ */ import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.io.File; import java.io.IOException; @@ -29,7 +30,10 @@ import java.util.ArrayList; import java.util.Arrays; +import javax.sql.DataSource; + import org.junit.Test; +import org.mockito.Matchers; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; import 
org.semanticweb.vlog4j.core.model.api.Statement; @@ -38,6 +42,8 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -84,4 +90,25 @@ public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURL String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; RuleParser.parse(input); } + + @Test(expected = ParsingException.class) + public void testUnknownDataSource() throws ParsingException { + String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; + RuleParser.parse(input); + } + + @Test + public void testCustomDataSource() throws ParsingException { + CsvFileDataSource source = mock(CsvFileDataSource.class); + DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", handler); + doReturn(source).when(handler).handleDeclaration(Matchers.any()); + + String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; + String[] expectedArguments = {"hello", "world"}; + RuleParser.parse(input, parserConfiguration); + + verify(handler).handleDeclaration(eq(expectedArguments)); + } } From 5684e8dbd6e1930d0e4e0cf5c6463b1e8f904555 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 11 Nov 2019 19:57:09 +0100 Subject: [PATCH 0678/1255] Bump mockito to 2.28.2 --- pom.xml | 2 +- .../vlog4j/syntax/parser/RuleParserDataSourceTest.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 
98b1b8ce1..07c9e2e46 100644 --- a/pom.xml +++ b/pom.xml @@ -64,7 +64,7 @@ UTF-8 2.1.100 4.12 - 1.10.19 + 2.28.2 1.7.28 3.9 1.5 diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 54f17c95a..8f2a3116a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -33,7 +33,7 @@ import javax.sql.DataSource; import org.junit.Test; -import org.mockito.Matchers; +import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.Statement; @@ -103,7 +103,7 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(Matchers.any()); + doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.any()); String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; String[] expectedArguments = {"hello", "world"}; From a4ce824ac9a1a8d8f956fad884c50686163908e6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 12 Nov 2019 12:09:35 +0100 Subject: [PATCH 0679/1255] Parser: Add handlers for CSV/RDF/SPARQL data source declarations --- .../vlog4j/core/model/api/DataSource.java | 90 +- .../implementation/RdfFileDataSource.java | 8 +- .../parser/DataSourceDeclarationHandler.java | 41 +- .../vlog4j/parser/ParserConfiguration.java | 114 +- .../CsvFileDataSourceDeclarationHandler.java | 51 + .../RdfFileDataSourceDeclarationHandler.java | 51 + 
...eryResultDataSourceDeclarationHandler.java | 67 + .../vlog4j/parser/javacc/JavaCCParser.jj | 1187 ++++++++--------- .../parser/javacc/JavaCCParserBase.java | 32 +- .../parser/javacc/SubParserFactory.java | 78 ++ .../parser/RuleParserDataSourceTest.java | 39 +- 11 files changed, 1020 insertions(+), 738 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java index d085716e6..504603d71 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java @@ -1,39 +1,51 @@ -package org.semanticweb.vlog4j.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -/** - * Interfaces various types of data sources for storing facts. - * - * @author Irina Dragoste - * - */ -public interface DataSource extends Entity { - - /** - * Constructs a String representation of the data source. - * - * @return a String representation of the data source configuration for a - * certain predicate. - */ - public String toConfigString(); - -} +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Optional; + +/** + * Interfaces various types of data sources for storing facts. + * + * @author Irina Dragoste + * + */ +public interface DataSource extends Entity { + + /** + * Constructs a String representation of the data source. + * + * @return a String representation of the data source configuration for a + * certain predicate. + */ + public String toConfigString(); + + /** + * Retrieve the required arity of target predicates for the data source. + * + * @return the required arity for the data source, or Optional.empty() if there + * is none. 
+ */ + public default Optional getRequiredArity() { + return Optional.empty(); + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java index e65515dc7..eb3ce09ea 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java @@ -23,8 +23,8 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; +import java.util.Optional; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** @@ -63,7 +63,7 @@ public class RdfFileDataSource extends FileDataSource { * {@code rdfFile} does not occur in * {@link #possibleExtensions}. */ - public RdfFileDataSource(@NonNull final File rdfFile) throws IOException { + public RdfFileDataSource(final File rdfFile) throws IOException { super(rdfFile, possibleExtensions); } @@ -77,4 +77,8 @@ public String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + public Optional getRequiredArity() { + return Optional.of(3); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 70a02b8de..d1f8766b1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -20,8 +20,10 @@ * #L% */ +import java.util.List; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** * Handler for parsing a custom Data Source declaration. 
@@ -30,14 +32,33 @@ */ @FunctionalInterface public interface DataSourceDeclarationHandler { - /** - * Parse a Data Source Declaration. - * - * @param arguments - * Arguments given to the Data Source declaration. - * - * @throws ParsingException when the given arguments are invalid for the Data Source. - * @return DataSource a DataSource instance. - */ - DataSource handleDeclaration(String[] arguments) throws ParsingException; + /** + * Parse a Data Source Declaration. + * + * @param arguments Arguments given to the Data Source declaration. + * @param subParserFactory a factory for obtaining a SubParser, sharing the + * parser's state, but bound to new input. + * + * @throws ParsingException when the given arity or arguments are invalid for + * the Data Source. + * @return a @{link DataSource} instance corresponding to the given arguments. + */ + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws ParsingException; + + /** + * Validate the provided number of arguments to the data source. + * + * @param arguments Arguments given to the Data Source declaration. + * @param number expected number of arguments + * + * @throws ParsingException when the given number of Arguments is invalid for + * the Data Source. 
+ */ + static void verifyCorrectNumberOfArguments(List arguments, int number) throws ParsingException { + if (arguments.size() != number) { + throw new ParsingException("Invalid number of arguments " + arguments.size() + + " for Data Source declaration, expected " + number); + } + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 2b2dbb66f..e810aa930 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -1,10 +1,5 @@ package org.semanticweb.vlog4j.parser; -import java.util.HashMap; - -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.parser.javacc.ParseException; - /*- * #%L * vlog4j-parser @@ -25,58 +20,81 @@ * #L% */ +import java.util.HashMap; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + /** * Class to keep parser configuration. * * @author Maximilian Marx */ public class ParserConfiguration { - /** - * Register a new Data Source. - * - * @param name - * Name of the data source, as it appears in the declaring directive. - * - * @param handler - * Handler for parsing a data source declaration. - * - * @throws IllegalArgumentException if the provided name is already registered. 
- * @return this - */ - public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) throws IllegalArgumentException { - if (dataSources.containsKey(name)) { - throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); - } + public ParserConfiguration() { + registerDefaultDataSources(); + } + + /** + * Register a new Data Source. + * + * @param name Name of the data source, as it appears in the declaring + * directive. + * @param handler Handler for parsing a data source declaration. + * + * @throws IllegalArgumentException if the provided name is already registered. + * @return this + */ + public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) + throws IllegalArgumentException { + if (dataSources.containsKey(name)) { + throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); + } + + this.dataSources.put(name, handler); + return this; + } + + /** + * Parse a Data Source declaration. + * + * @param name Name of the data source. + * @param args arguments given in the data source declaration. + * @param subParserFactory a {@link SubParserFactory} instance that creates + * parser with the same context as the current parser. + * + * @throws ParsingException when the declaration is invalid, e.g., if the Data + * Source is not known. + * + * @return the Data Source instance. + */ + public DataSource parseDataSourceDeclaration(String name, List args, + final SubParserFactory subParserFactory) throws ParsingException { + DataSourceDeclarationHandler handler = dataSources.get(name); - this.dataSources.put(name, handler); - return this; - } + if (handler == null) { + throw new ParsingException("Data source \"" + name + "\" is not known."); + } - /** - * Parse a Data Source declaration. - * - * @param name - * Name of the data source. - * - * @param args - * arguments given in the data source declaration. 
- * - * @throws ParsingException when the declaration is invalid, e.g., if the Data Source is not known. - * - * @return the Data Source instance. - */ - public DataSource parseDataSourceDeclaration(String name, String[] args) throws ParsingException { - DataSourceDeclarationHandler handler = dataSources.get(name); + return handler.handleDeclaration(args, subParserFactory); + } - if (handler == null) { - throw new ParsingException("Data source \"" + name + "\" is not known."); - } + /** + * Register built-in data sources (currently CSV, RDF, SPARQL). + */ + private void registerDefaultDataSources() { + registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); + registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); + registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); + } - return handler.handleDeclaration(args); - } - /** - * The registered data sources. - */ - HashMap dataSources = new HashMap<>(); + /** + * The registered data sources. + */ + private HashMap dataSources = new HashMap<>(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java new file mode 100644 index 000000000..dc640dff5 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -0,0 +1,51 @@ +package org.semanticweb.vlog4j.parser.datasources; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@link CsvFileDataSource} declarations + * + * @author Maximilian Marx + */ +public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + String fileName = arguments.get(0); + + try { + return new CsvFileDataSource(new File(fileName)); + } catch (IOException e) { + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java new file mode 100644 index 000000000..29714b972 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -0,0 +1,51 @@ +package org.semanticweb.vlog4j.parser.datasources; + +/*- + * #%L + * 
vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@link RdfFileDataSource} declarations + * + * @author Maximilian Marx + */ +public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + String fileName = arguments.get(0); + + try { + return new RdfFileDataSource(new File(fileName)); + } catch (IOException e) { + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java new file mode 100644 index 
000000000..1faff9341 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -0,0 +1,67 @@ +package org.semanticweb.vlog4j.parser.datasources; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.List; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; +import org.semanticweb.vlog4j.parser.javacc.ParseException; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@link SparqlQueryResultDataSource} declarations + * + * @author Maximilian Marx + */ +public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + throws 
ParsingException { + DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 3); + + String endpoint = arguments.get(0); + try { + JavaCCParser parser = subParserFactory.makeSubParser(endpoint); + endpoint = parser.IRI(false); + } catch (ParseException | PrefixDeclarationException e) { + throw new ParsingException(e); + } + + String variables = arguments.get(1); + String query = arguments.get(2); + + try { + return new SparqlQueryResultDataSource(new URL(endpoint), variables, query); + } catch (MalformedURLException e) { + throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index e58c47b5b..022a1815a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -1,606 +1,581 @@ -options -{ - // Use \ u escapes in streams AND use a reader for the query - // => get both raw and escaped unicode - JAVA_UNICODE_ESCAPE = true; - UNICODE_INPUT = false; - - STATIC = false; - // DEBUG_PARSER = true; - // DEBUG_TOKEN_MANAGER = true ; -} - -PARSER_BEGIN(JavaCCParser) -package org.semanticweb.vlog4j.parser.javacc; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.net.MalformedURLException; - -import java.util.List; -import java.util.ArrayList; - -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; - -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; 
-import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; - -import org.semanticweb.vlog4j.core.model.implementation.Expressions; - - -public class JavaCCParser extends JavaCCParserBase -{ -} - -PARSER_END(JavaCCParser) - - -void parse() throws PrefixDeclarationException: -{ -} -{ - ( base() )? - ( prefix() )* - ( source() )* - ( statement() )* - < EOF > -} - -void base() throws PrefixDeclarationException: -{ - String iriString; -} -{ - < BASE > iriString = IRIREF() < DOT > - { - prefixDeclarations.setBase(iriString); - } -} - -void prefix() throws PrefixDeclarationException: -{ - Token t; - String iriString; -} -{ - ( - LOOKAHEAD(< COLON >) < PREFIX > t = < COLON > iriString = IRIREF() < DOT > - | < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > - ) - { - //note that prefix includes the colon (:) - prefixDeclarations.setPrefix(t.image, iriString); - } -} - -void source() throws PrefixDeclarationException: -{ - String predicateName; - DataSource dataSource; - Token arity; -} -{ - < SOURCE > predicateName = predicateName() < LPAREN > arity = < INTEGER > < RPAREN > < COLON > dataSource = dataSource() < DOT > - { - int nArity; - nArity = Integer.parseInt(arity.image); - // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! 
- if ( dataSource instanceof RdfFileDataSource && nArity != 3 ) - throw new ParseException("Cannot load RDF data into predicate of arity " + nArity +"."); - addDataSource(predicateName, nArity,dataSource); - } -} - -DataSource dataSource() throws PrefixDeclarationException: -{ - String fileName; - String endpoint; - String variables; - String query; - Token sourceName; - String[] arguments; -} -{ - < LOADCSV > < LPAREN > fileName = String() < RPAREN > - { - try { - return new CsvFileDataSource(new File(fileName)) ; - } catch (IOException e) { - throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); - } - } -| < LOADRDF > < LPAREN > fileName = String() < RPAREN > - { - try { - return new RdfFileDataSource(new File(fileName)) ; - } catch (IOException e) { - throw new ParseException("Could not use source file \"" + fileName +"\": " + e.getMessage()); - } - } -| < SPARQL > < LPAREN > endpoint = IRI(false) < COMMA > variables = String() < COMMA > query = String() < RPAREN > - { - try { - return new SparqlQueryResultDataSource(new URL(endpoint), variables, query); - } catch (MalformedURLException e) { - throw new ParseException("SPARQL endoint \"" + endpoint +"\" is not a valid URL: " + e.getMessage()); - } - } -| sourceName = < DIRECTIVENAME > < LPAREN > arguments = Strings() < RPAREN > - { - try { - return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments); - } catch (ParsingException e) { - throw new ParseException(e.getMessage()); - } - } -} - - -void statement() throws PrefixDeclarationException: -{ - Statement statement; - resetVariableSets(); -} -{ - LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} -| statement = fact(FormulaContext.HEAD) < DOT > //not from a rule - { - knowledgeBase.addStatement(statement); - } -} - -Rule rule() throws PrefixDeclarationException: -{ - List < PositiveLiteral > head; - List < Literal > body; -} -{ - head = 
listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > - { - // check that the intersection between headExiVars and BodyVars is empty - for (String variable : headExiVars) { - if (bodyVars.contains(variable)) - throw new ParseException("Malformed rule " + head + " :- " + body + "\nExistential variable " + variable + " also used in rule body."); - } - - // check that bodyVars contains headUniVars - for (String variable : headUniVars) { - if (!bodyVars.contains(variable)) - throw new ParseException("Unsafe rule " + head + " :- " + body + "\nUniversal variable " + variable + " occurs in head but not in body."); - } - - return Expressions.makeRule(Expressions.makePositiveConjunction(head), Expressions.makeConjunction(body)); - } -} - -List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException: -{ - PositiveLiteral l; - List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); -} -{ - l = positiveLiteral(context) { list.add(l); } - ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* - { return list; } -} - -List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException: -{ - Literal l; - List < Literal > list = new ArrayList < Literal > (); -} -{ - l = literal(context) { list.add(l); } - ( < COMMA > l = literal(context) { list.add(l); } )* - { return list; } -} - -Literal literal(FormulaContext context) throws PrefixDeclarationException: -{ - Literal l = null; -} -{ - l = positiveLiteral(context) { return l; } -| l = negativeLiteral(context) { return l; } -} - -PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException: -{ - Token t; - List < Term > terms; - String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makePositiveLiteral(predicateName, terms); } -} - -Fact fact(FormulaContext context) throws 
PrefixDeclarationException: -{ - Token t; - List < Term > terms; - String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { - try { - return Expressions.makeFact(predicateName, terms); - } catch (IllegalArgumentException e) { - throw new ParseException("Error parsing fact: " + e.toString()); - } - } -} - -NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: -{ - List < Term > terms; - String predicateName; -} -{ - < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makeNegativeLiteral(predicateName, terms); } -} - -List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException: -{ - Term t; - List < Term > list = new ArrayList < Term > (); -} -{ - t = term(context) { list.add(t); } - ( < COMMA > t = term(context) { list.add(t); } )* - { return list; } -} - -String predicateName() throws PrefixDeclarationException: -{ - String s; - Token t; -} -{ - s = IRI(false) { return s; } -| t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } -} - -Term term(FormulaContext context) throws PrefixDeclarationException: -{ - Token t; - String s; - Constant c; -} -{ //TODO move Expressions.makeConstant to JavaCCParserBase - s = IRI(false) { return Expressions.makeAbstractConstant(s); } -| c = NumericLiteral() { return c; } -| c = RDFLiteral() { return c; } -| t = < UNIVAR > - { - s = t.image.substring(1); - if (context == FormulaContext.HEAD) - headUniVars.add(s); - else if (context == FormulaContext.BODY) - bodyVars.add(s); - return Expressions.makeUniversalVariable(s); - } -| t = < EXIVAR > - { - s = t.image.substring(1); - if (context == FormulaContext.HEAD) - headExiVars.add(s); - if (context == FormulaContext.BODY) - throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); - return Expressions.makeExistentialVariable(s); - } -| t = < VARORPREDNAME > { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(t.image));} -} - -/** [16] */ -Constant NumericLiteral() : -{ - Token t; -} -{ - t = < INTEGER > { return createIntegerConstant(t.image); } -| t = < DECIMAL > { return createDecimalConstant(t.image); } -| t = < DOUBLE > { return createDoubleConstant(t.image); } -} - -Constant RDFLiteral() throws PrefixDeclarationException: -{ - Token t; - String lex = null; - String lang = null; // Optional lang tag and datatype. - String dt = null; -} -{ - lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? - { return createDataConstant(lex, lang, dt); } -} - -String Langtag() : -{ - Token t; -} -{ - // Enumerate the directives here because they look like language tags. - ( - t = < LANGTAG > - ) - { - String lang = stripChars(t.image, 1); - return lang; - } -} - -String String(): -{ - Token t; - String lex; -} -{ - ( - t = < STRING_LITERAL1 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL2 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL_LONG1 > { lex = stripQuotes3(t.image); } - | t = < STRING_LITERAL_LONG2 > { lex = stripQuotes3(t.image); } - ) - { - lex = unescapeStr(lex, t.beginLine, t.beginColumn); - return lex; - } -} - -String[] Strings(): -{ - String str; - String[] rest = {}; -} -{ - str = String() [< COMMA > rest = Strings()] - { - return collectStrings(str, rest); - } -} - -String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: -{ - String iri; -} -{ - ( - iri = IRIREF() - | iri = PrefixedName() - ) - { - String result = prefixDeclarations.absolutize(iri); - if (includeAngleBrackets) { - result = "<"+result+">"; - } - return result; - } -} - -String PrefixedName() throws PrefixDeclarationException: -{ - Token t; -} -{ - //( - t = < PNAME_LN > - //| t = < PNAME_NS > - //) - { return 
prefixDeclarations.resolvePrefixedName(t.image);} - //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} -} - -String IRIREF() : -{ - Token t; -} -{ - t = < IRI > - { - // we remove '<' and '>' - return t.image.substring(1,t.image.length()-1); - } -} - -// ------------------------------------------ -// Whitespace -SKIP : -{ - " " -| "\t" -| "\n" -| "\r" -| "\f" -} - -//Comments -SKIP :{< "%" (~["\n"])* "\n" >} - -// ------------------------------------------ -TOKEN : -{ - < PREFIX : "@prefix" > -| < BASE : "@base" > -| < SOURCE : "@source" > -| < LOADCSV : "load-csv"> -| < LOADRDF : "load-rdf"> -| < SPARQL : "sparql"> -} - -TOKEN: -{ - < INTEGER : ([ "-", "+" ])? < DIGITS > > -| < DECIMAL : - ([ "-", "+" ])? - ( - (< DIGITS >)+ "." (< DIGITS >)* - | "." (< DIGITS >)+ - ) - > -| < DOUBLE : - ([ "+", "-" ])? - ( - ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > - | "." ([ "0"-"9" ])+ (< EXPONENT >) - | ([ "0"-"9" ])+ < EXPONENT > - ) - > -| < #DIGITS : ([ "0"-"9" ])+ > -| < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > -} - -TOKEN: -{ - < STRING_LITERAL1 : - // Single quoted string - "'" - ( - (~[ "'", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "'" > -| < STRING_LITERAL2 : - // Double quoted string - "\"" - ( - (~[ "\"", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "\"" > -| < STRING_LITERAL_LONG1 : - "'''" - ( - ~[ "'", "\\" ] - | < ECHAR > - | ("'" ~[ "'" ]) - | ("''" ~[ "'" ]) - )* - "'''" > -| < STRING_LITERAL_LONG2 : - "\"\"\"" - ( - ~[ "\"", "\\" ] - | < ECHAR > - | ("\"" ~[ "\"" ]) - | ("\"\"" ~[ "\"" ]) - )* - "\"\"\"" > -| < #ECHAR : - "\\" - ( - "t" - | "b" - | "n" - | "r" - | "f" - | "\\" - | "\"" - | "'" - ) > -} - -TOKEN : -{ - // Includes # for relative URIs - < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > -| < PNAME_LN : (< PN_PREFIX >)? 
":" < PN_LOCAL > > -| < PNAME_NS : < PN_PREFIX > ":" > -| < UNIVAR : < QMARK > < VARORPREDNAME > > -| < EXIVAR : < EMARK > < VARORPREDNAME > > -| < LANGTAG : - < AT > (< A2Z >)+ - ( - "-" (< A2ZN >)+ - )* > -| < VARORPREDNAME : < A2Z> (< A2ZN >)* > -| < #A2Z : [ "a"-"z", "A"-"Z" ] > -| < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > -| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > -| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > -} - -TOKEN : -{ - < LPAREN : "(" > -| < RPAREN : ")" > -| < COMMA : "," > -| < DOT : "." > -| < ARROW : ":-" > -| < QMARK : "?" > -| < EMARK : "!" > -| < TILDE : "~" > -| < COLON : ":" > -| < DATATYPE : "^^" > -| < AT : "@" > -} - -TOKEN : -{ - < #PN_CHARS_BASE : - [ "A"-"Z" ] - | [ "a"-"z" ] - | [ "\u00c0"-"\u00d6" ] - | [ "\u00d8"-"\u00f6" ] - | [ "\u00f8"-"\u02ff" ] - | [ "\u0370"-"\u037d" ] - | [ "\u037f"-"\u1fff" ] - | [ "\u200c"-"\u200d" ] - | [ "\u2070"-"\u218f" ] - | [ "\u2c00"-"\u2fef" ] - | [ "\u3001"-"\ud7ff" ] - | [ "\uf900"-"\ufffd" ] - > - // | [ ""#x10000-#xEFFFF] -| - < #PN_CHARS_U : - < PN_CHARS_BASE > - | "_" > -| < #PN_CHARS : - ( - < PN_CHARS_U > - | "-" - | [ "0"-"9" ] - | "\u00b7" - | [ "\u0300"-"\u036f" ] - | [ "\u203f"-"\u2040" ] - ) > -| < #PN_PREFIX : - < PN_CHARS_BASE > - ( - ( - < PN_CHARS > - | "." - )* - < PN_CHARS > - )? > -| < #PN_LOCAL : - ( - < PN_CHARS_U > - | ":" - | [ "0"-"9" ] - ) - ( - ( - < PN_CHARS > - | "." - | ":" - )* - < PN_CHARS > - )? 
> -} +options +{ + // Use \ u escapes in streams AND use a reader for the query + // => get both raw and escaped unicode + JAVA_UNICODE_ESCAPE = true; + UNICODE_INPUT = false; + + STATIC = false; + // DEBUG_PARSER = true; + // DEBUG_TOKEN_MANAGER = true ; +} + +PARSER_BEGIN(JavaCCParser) +package org.semanticweb.vlog4j.parser.javacc; + +import java.io.File; +import java.io.InputStream; +import java.io.IOException; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.List; +import java.util.ArrayList; +import java.util.LinkedList; + +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; + +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; + +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + + +public class JavaCCParser extends JavaCCParserBase +{ + private SubParserFactory getSubParserFactory() { + return new SubParserFactory(this); + } +} + +PARSER_END(JavaCCParser) + + +void parse() throws PrefixDeclarationException: +{ +} +{ + ( base() )? 
+ ( prefix() )* + ( source() )* + ( statement() )* + < EOF > +} + +void base() throws PrefixDeclarationException: +{ + String iriString; +} +{ + < BASE > iriString = IRIREF() < DOT > + { + prefixDeclarations.setBase(iriString); + } +} + +void prefix() throws PrefixDeclarationException: +{ + Token t; + String iriString; +} +{ + ( + LOOKAHEAD(< COLON >) < PREFIX > t = < COLON > iriString = IRIREF() < DOT > + | < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > + ) + { + //note that prefix includes the colon (:) + prefixDeclarations.setPrefix(t.image, iriString); + } +} + +void source() throws PrefixDeclarationException: +{ + String predicateName; + DataSource dataSource; + Token arity; +} +{ + < SOURCE > predicateName = predicateName() < LPAREN > arity = < INTEGER > < RPAREN > < COLON > dataSource = dataSource() < DOT > + { + int nArity; + nArity = Integer.parseInt(arity.image); + // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! + if ( dataSource instanceof RdfFileDataSource && nArity != 3 ) + throw new ParseException("Cannot load RDF data into predicate of arity " + nArity +"."); + + addDataSource(predicateName, nArity, dataSource); + } +} + +DataSource dataSource() throws PrefixDeclarationException: +{ + Token sourceName; + List< String > arguments; +} +{ + (sourceName = < DIRECTIVENAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > + { + try { + return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); + } catch (ParsingException e) { + throw new ParseException(e.getMessage()); + } + } +} + +void statement() throws PrefixDeclarationException: +{ + Statement statement; + resetVariableSets(); +} +{ + LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} +| statement = fact(FormulaContext.HEAD) < DOT > //not from a rule + { + knowledgeBase.addStatement(statement); + } +} + +Rule rule() throws 
PrefixDeclarationException: +{ + List < PositiveLiteral > head; + List < Literal > body; +} +{ + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > + { + // check that the intersection between headExiVars and BodyVars is empty + for (String variable : headExiVars) { + if (bodyVars.contains(variable)) + throw new ParseException("Malformed rule " + head + " :- " + body + "\nExistential variable " + variable + " also used in rule body."); + } + + // check that bodyVars contains headUniVars + for (String variable : headUniVars) { + if (!bodyVars.contains(variable)) + throw new ParseException("Unsafe rule " + head + " :- " + body + "\nUniversal variable " + variable + " occurs in head but not in body."); + } + + return Expressions.makeRule(Expressions.makePositiveConjunction(head), Expressions.makeConjunction(body)); + } +} + +List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException: +{ + PositiveLiteral l; + List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); +} +{ + l = positiveLiteral(context) { list.add(l); } + ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* + { return list; } +} + +List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException: +{ + Literal l; + List < Literal > list = new ArrayList < Literal > (); +} +{ + l = literal(context) { list.add(l); } + ( < COMMA > l = literal(context) { list.add(l); } )* + { return list; } +} + +Literal literal(FormulaContext context) throws PrefixDeclarationException: +{ + Literal l = null; +} +{ + l = positiveLiteral(context) { return l; } +| l = negativeLiteral(context) { return l; } +} + +PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException: +{ + Token t; + List < Term > terms; + String predicateName; +} +{ + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + { return 
Expressions.makePositiveLiteral(predicateName, terms); } +} + +Fact fact(FormulaContext context) throws PrefixDeclarationException: +{ + Token t; + List < Term > terms; + String predicateName; +} +{ + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + { + try { + return Expressions.makeFact(predicateName, terms); + } catch (IllegalArgumentException e) { + throw new ParseException("Error parsing fact: " + e.toString()); + } + } +} + +NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: +{ + List < Term > terms; + String predicateName; +} +{ + < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + { return Expressions.makeNegativeLiteral(predicateName, terms); } +} + +List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException: +{ + Term t; + List < Term > list = new ArrayList < Term > (); +} +{ + t = term(context) { list.add(t); } + ( < COMMA > t = term(context) { list.add(t); } )* + { return list; } +} + +String predicateName() throws PrefixDeclarationException: +{ + String s; + Token t; +} +{ + s = IRI(false) { return s; } +| t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } +} + +Term term(FormulaContext context) throws PrefixDeclarationException: +{ + Token t; + String s; + Constant c; +} +{ //TODO move Expressions.makeConstant to JavaCCParserBase + s = IRI(false) { return Expressions.makeAbstractConstant(s); } +| c = NumericLiteral() { return c; } +| c = RDFLiteral() { return c; } +| t = < UNIVAR > + { + s = t.image.substring(1); + if (context == FormulaContext.HEAD) + headUniVars.add(s); + else if (context == FormulaContext.BODY) + bodyVars.add(s); + return Expressions.makeUniversalVariable(s); + } +| t = < EXIVAR > + { + s = t.image.substring(1); + if (context == FormulaContext.HEAD) + headExiVars.add(s); + if (context == FormulaContext.BODY) + throw new ParseException("Existentialy quantified variables 
can not appear in the body. Line: " + t.beginLine + ", Column: "+ t.beginColumn); + return Expressions.makeExistentialVariable(s); + } +| t = < VARORPREDNAME > { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(t.image));} +} + +/** [16] */ +Constant NumericLiteral() : +{ + Token t; +} +{ + t = < INTEGER > { return createIntegerConstant(t.image); } +| t = < DECIMAL > { return createDecimalConstant(t.image); } +| t = < DOUBLE > { return createDoubleConstant(t.image); } +} + +Constant RDFLiteral() throws PrefixDeclarationException: +{ + Token t; + String lex = null; + String lang = null; // Optional lang tag and datatype. + String dt = null; +} +{ + lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? + { return createDataConstant(lex, lang, dt); } +} + +String Langtag() : +{ + Token t; +} +{ + // Enumerate the directives here because they look like language tags. + ( + t = < LANGTAG > + ) + { + String lang = stripChars(t.image, 1); + return lang; + } +} + +String String(): +{ + Token t; + String lex; +} +{ + ( + t = < STRING_LITERAL1 > { lex = stripQuotes(t.image); } + | t = < STRING_LITERAL2 > { lex = stripQuotes(t.image); } + | t = < STRING_LITERAL_LONG1 > { lex = stripQuotes3(t.image); } + | t = < STRING_LITERAL_LONG2 > { lex = stripQuotes3(t.image); } + ) + { + lex = unescapeStr(lex, t.beginLine, t.beginColumn); + return lex; + } +} + +LinkedList< String > Arguments() throws PrefixDeclarationException: +{ + String str; + LinkedList< String > rest = new LinkedList< String >(); +} +{ + (str = String() | str = IRI(true)) [< COMMA > rest = Arguments()] + { + rest.addFirst(str); + return rest; + } +} + +String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: +{ + String iri; +} +{ + ( + iri = IRIREF() + | iri = PrefixedName() + ) + { + String result = prefixDeclarations.absolutize(iri); + if (includeAngleBrackets) { + result = "<"+result+">"; + } + return result; + } +} + +String PrefixedName() throws 
PrefixDeclarationException: +{ + Token t; +} +{ + //( + t = < PNAME_LN > + //| t = < PNAME_NS > + //) + { return prefixDeclarations.resolvePrefixedName(t.image);} + //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} +} + +String IRIREF() : +{ + Token t; +} +{ + t = < IRI > + { + // we remove '<' and '>' + return t.image.substring(1,t.image.length()-1); + } +} + +// ------------------------------------------ +// Whitespace +SKIP : +{ + " " +| "\t" +| "\n" +| "\r" +| "\f" +} + +//Comments +SKIP :{< "%" (~["\n"])* "\n" >} + +// ------------------------------------------ +TOKEN : +{ + < PREFIX : "@prefix" > +| < BASE : "@base" > +| < SOURCE : "@source" > +} + +TOKEN: +{ + < INTEGER : ([ "-", "+" ])? < DIGITS > > +| < DECIMAL : + ([ "-", "+" ])? + ( + (< DIGITS >)+ "." (< DIGITS >)* + | "." (< DIGITS >)+ + ) + > +| < DOUBLE : + ([ "+", "-" ])? + ( + ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > + | "." ([ "0"-"9" ])+ (< EXPONENT >) + | ([ "0"-"9" ])+ < EXPONENT > + ) + > +| < #DIGITS : ([ "0"-"9" ])+ > +| < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > +} + +TOKEN: +{ + < STRING_LITERAL1 : + // Single quoted string + "'" + ( + (~[ "'", "\\", "\n", "\r" ]) + | < ECHAR > + )* + "'" > +| < STRING_LITERAL2 : + // Double quoted string + "\"" + ( + (~[ "\"", "\\", "\n", "\r" ]) + | < ECHAR > + )* + "\"" > +| < STRING_LITERAL_LONG1 : + "'''" + ( + ~[ "'", "\\" ] + | < ECHAR > + | ("'" ~[ "'" ]) + | ("''" ~[ "'" ]) + )* + "'''" > +| < STRING_LITERAL_LONG2 : + "\"\"\"" + ( + ~[ "\"", "\\" ] + | < ECHAR > + | ("\"" ~[ "\"" ]) + | ("\"\"" ~[ "\"" ]) + )* + "\"\"\"" > +| < #ECHAR : + "\\" + ( + "t" + | "b" + | "n" + | "r" + | "f" + | "\\" + | "\"" + | "'" + ) > +} + +TOKEN : +{ + // Includes # for relative URIs + < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > +| < PNAME_LN : (< PN_PREFIX >)? 
":" < PN_LOCAL > > +| < PNAME_NS : < PN_PREFIX > ":" > +| < UNIVAR : < QMARK > < VARORPREDNAME > > +| < EXIVAR : < EMARK > < VARORPREDNAME > > +| < LANGTAG : + < AT > (< A2Z >)+ + ( + "-" (< A2ZN >)+ + )* > +| < VARORPREDNAME : < A2Z> (< A2ZN >)* > +| < #A2Z : [ "a"-"z", "A"-"Z" ] > +| < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > +| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > +| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > +} + +TOKEN : +{ + < LPAREN : "(" > +| < RPAREN : ")" > +| < COMMA : "," > +| < DOT : "." > +| < ARROW : ":-" > +| < QMARK : "?" > +| < EMARK : "!" > +| < TILDE : "~" > +| < COLON : ":" > +| < DATATYPE : "^^" > +| < AT : "@" > +} + +TOKEN : +{ + < #PN_CHARS_BASE : + [ "A"-"Z" ] + | [ "a"-"z" ] + | [ "\u00c0"-"\u00d6" ] + | [ "\u00d8"-"\u00f6" ] + | [ "\u00f8"-"\u02ff" ] + | [ "\u0370"-"\u037d" ] + | [ "\u037f"-"\u1fff" ] + | [ "\u200c"-"\u200d" ] + | [ "\u2070"-"\u218f" ] + | [ "\u2c00"-"\u2fef" ] + | [ "\u3001"-"\ud7ff" ] + | [ "\uf900"-"\ufffd" ] + > + // | [ ""#x10000-#xEFFFF] +| + < #PN_CHARS_U : + < PN_CHARS_BASE > + | "_" > +| < #PN_CHARS : + ( + < PN_CHARS_U > + | "-" + | [ "0"-"9" ] + | "\u00b7" + | [ "\u0300"-"\u036f" ] + | [ "\u203f"-"\u2040" ] + ) > +| < #PN_PREFIX : + < PN_CHARS_BASE > + ( + ( + < PN_CHARS > + | "." + )* + < PN_CHARS > + )? > +| < #PN_LOCAL : + ( + < PN_CHARS_U > + | ":" + | [ "0"-"9" ] + ) + ( + ( + < PN_CHARS > + | "." + | ":" + )* + < PN_CHARS > + )? 
> +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7f0929e5f..8902a4c08 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -52,7 +52,7 @@ * */ public class JavaCCParserBase { - final PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); + PrefixDeclarations prefixDeclarations; KnowledgeBase knowledgeBase; ParserConfiguration parserConfiguration; @@ -91,6 +91,7 @@ public enum FormulaContext { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); + this.prefixDeclarations = new LocalPrefixDeclarations(); this.parserConfiguration = new ParserConfiguration(); } @@ -106,22 +107,19 @@ Constant createDoubleConstant(String lexicalForm) { return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_DOUBLE); } - void addDataSource(String predicateName, int arity, DataSource dataSource) { + void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { + if (dataSource.getRequiredArity().isPresent()) { + Integer requiredArity = dataSource.getRequiredArity().get(); + if (requiredArity != arity) { + throw new ParseException("Invalid arity " + arity + " for data source, " + + "expected " + requiredArity + "."); + } + } + Predicate predicate = Expressions.makePredicate(predicateName, arity); knowledgeBase.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } - static String[] collectStrings(String str, String[] rest) { - ArrayList strings = new ArrayList<>(); - strings.add(str); - - for (String next : rest) { - strings.add(next); - } - - return strings.toArray(rest); - } - static String unescapeStr(String s, int line, int column) throws ParseException { return unescape(s, '\\', false, line, 
column); } @@ -213,7 +211,7 @@ static String stripChars(String s, int n) { /** * Creates a suitable {@link Constant} from the parsed data. - * + * * @param string the string data (unescaped) * @param languageTag the language tag, or null if not present * @param datatype the datatype, or null if not provided @@ -255,5 +253,11 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } + protected void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { + this.prefixDeclarations = prefixDeclarations; + } + protected PrefixDeclarations getPrefixDeclarations() { + return prefixDeclarations; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java new file mode 100644 index 000000000..ddea292b8 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -0,0 +1,78 @@ +package org.semanticweb.vlog4j.parser.javacc; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.InputStream; +import java.io.ByteArrayInputStream; + +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.ParserConfiguration; + +/** + * Factory for creating a SubParser sharing configuration, state, and + * prefixes, but with an independent input stream, to be used, e.g., + * for parsing arguments in data source declarations. + * + * @author Maximilian Marx + */ +public class SubParserFactory { + /** + * Construct a SubParserFactory. + * + * @argument parser the parser instance to get the state from. + */ + SubParserFactory(JavaCCParser parser) { + this.knowledgeBase = parser.getKnowledgeBase(); + this.prefixDeclarations = parser.getPrefixDeclarations(); + this.parserConfiguration = parser.getParserConfiguration(); + } + + /** + * Create a new parser with the specified state and given input. + * + * @argument inputStream the input stream to parse. + * @argument encoding encoding of the input stream. + * + * @return A new {@link JavaCCParser} bound to inputStream and + * with the specified parser state. 
+ */ + public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { + JavaCCParser subParser = new JavaCCParser(inputStream, encoding); + subParser.setKnowledgeBase(knowledgeBase); + subParser.setPrefixDeclarations(prefixDeclarations); + subParser.setParserConfiguration(parserConfiguration); + + return subParser; + } + + public JavaCCParser makeSubParser(final InputStream inputStream) { + return makeSubParser(inputStream, "UTF-8"); + } + + public JavaCCParser makeSubParser(final String string) { + return makeSubParser(new ByteArrayInputStream(string.getBytes())); + } + + private KnowledgeBase knowledgeBase; + private ParserConfiguration parserConfiguration; + private PrefixDeclarations prefixDeclarations; +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 8f2a3116a..aee340a60 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -29,8 +29,7 @@ import java.net.URL; import java.util.ArrayList; import java.util.Arrays; - -import javax.sql.DataSource; +import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; @@ -46,6 +45,7 @@ import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; public class RuleParserDataSourceTest { @Test @@ -91,24 +91,25 @@ public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURL RuleParser.parse(input); } - @Test(expected = ParsingException.class) - public void testUnknownDataSource() throws ParsingException { - String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; - 
RuleParser.parse(input); - } + @Test(expected = ParsingException.class) + public void testUnknownDataSource() throws ParsingException { + String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; + RuleParser.parse(input); + } - @Test - public void testCustomDataSource() throws ParsingException { - CsvFileDataSource source = mock(CsvFileDataSource.class); - DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); - ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.any()); + @Test + public void testCustomDataSource() throws ParsingException { + CsvFileDataSource source = mock(CsvFileDataSource.class); + DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", handler); + doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), + ArgumentMatchers.any()); - String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; - String[] expectedArguments = {"hello", "world"}; - RuleParser.parse(input, parserConfiguration); + String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; + List expectedArguments = Arrays.asList("hello", "world"); + RuleParser.parse(input, parserConfiguration); - verify(handler).handleDeclaration(eq(expectedArguments)); - } + verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); + } } From 373df673d6315973339644414b7ad8100342ac21 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 12 Nov 2019 13:38:43 +0100 Subject: [PATCH 0680/1255] Parser: Split out Default Parser Configuration --- .../parser/DefaultParserConfiguration.java | 46 +++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 17 ------- .../parser/javacc/JavaCCParserBase.java 
| 23 +++++----- 3 files changed, 57 insertions(+), 29 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java new file mode 100644 index 000000000..168a738bb --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -0,0 +1,46 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; + +/** + * Default parser configuration. Registers default data sources. + * + * @author Maximilian Marx + */ +public class DefaultParserConfiguration extends ParserConfiguration { + public DefaultParserConfiguration() { + super(); + registerDefaultDataSources(); + } + + /** + * Register built-in data sources (currently CSV, RDF, SPARQL). 
+ */ + private void registerDefaultDataSources() { + registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); + registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); + registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index e810aa930..cf04c68f4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -24,9 +24,6 @@ import java.util.List; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -35,10 +32,6 @@ * @author Maximilian Marx */ public class ParserConfiguration { - public ParserConfiguration() { - registerDefaultDataSources(); - } - /** * Register a new Data Source. * @@ -83,16 +76,6 @@ public DataSource parseDataSourceDeclaration(String name, List args, return handler.handleDeclaration(args, subParserFactory); } - /** - * Register built-in data sources (currently CSV, RDF, SPARQL). - */ - private void registerDefaultDataSources() { - registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); - registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); - registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); - } - - /** * The registered data sources. 
*/ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 8902a4c08..dcd01197c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.parser.javacc; -import java.util.ArrayList; - /*- * #%L * vlog4j-parser @@ -33,6 +31,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -55,7 +54,7 @@ public class JavaCCParserBase { PrefixDeclarations prefixDeclarations; KnowledgeBase knowledgeBase; - ParserConfiguration parserConfiguration; + ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -92,7 +91,7 @@ public enum FormulaContext { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); this.prefixDeclarations = new LocalPrefixDeclarations(); - this.parserConfiguration = new ParserConfiguration(); + this.parserConfiguration = new DefaultParserConfiguration(); } Constant createIntegerConstant(String lexicalForm) { @@ -111,8 +110,8 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw if (dataSource.getRequiredArity().isPresent()) { Integer requiredArity = dataSource.getRequiredArity().get(); if (requiredArity != arity) { - throw new ParseException("Invalid arity " + arity + " for data source, " - + "expected " + requiredArity + "."); + throw new ParseException( + "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); } } @@ -245,13 +244,13 @@ public KnowledgeBase getKnowledgeBase() { return knowledgeBase; } - public void setParserConfiguration(ParserConfiguration parserConfiguration) { - this.parserConfiguration = parserConfiguration; - } + public void setParserConfiguration(ParserConfiguration parserConfiguration) { + this.parserConfiguration = parserConfiguration; + } - public ParserConfiguration getParserConfiguration() { - return parserConfiguration; - } + public ParserConfiguration getParserConfiguration() { + return parserConfiguration; + } protected void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; From a81208ae749eb4e679124f4187bd0ff3d11e7a09 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 12 Nov 2019 20:33:31 +0100 Subject: [PATCH 0681/1255] Parser: Allow for custom DatatypeConstants --- .../parser/DatatypeConstantHandler.java | 42 ++++++ .../vlog4j/parser/ParserConfiguration.java | 64 +++++++++ .../semanticweb/vlog4j/parser/RuleParser.java | 134 +++++++++++------- .../vlog4j/parser/javacc/JavaCCParser.jj | 16 +-- .../parser/javacc/JavaCCParserBase.java | 50 +++---- 
.../vlog4j/syntax/parser/RuleParserTest.java | 23 ++- 6 files changed, 247 insertions(+), 82 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java new file mode 100644 index 000000000..7f8ffc3e7 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java @@ -0,0 +1,42 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; + +/** + * Handler for parsing a custom Data Source declaration. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface DatatypeConstantHandler { + /** + * Parse a Data Source Declaration. + * + * @param lexicalForm lexical representation of the constant. + * + * @throws ParsingException when the given representation is invalid for this datatype. 
+ + * @return + */ + public DatatypeConstant createConstant(String lexicalForm) throws ParsingException; +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index cf04c68f4..1aec212b0 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -23,7 +23,10 @@ import java.util.HashMap; import java.util.List; +import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -76,8 +79,69 @@ public DataSource parseDataSourceDeclaration(String name, List args, return handler.handleDeclaration(args, subParserFactory); } + /** + * Parse a constant with optional data type and language tag. + * + * @param lexicalForm the (unescaped) lexical form of the constant. + * @param languageTag the language tag, or null if not present. + * @param the datatype, or null if not present. + * @note At most one of {@code languageTag} and {@code datatype} may be + * non-null. + * + * @throws ParsingException when the lexical form is invalid for the + * given data type. + * @throws IllegalArgumentException when neither {@code languageTag} and + * {@code datatype} are null. + * @return the {@link Constant} corresponding to the given arguments. 
+ */ + public Constant parseConstant(String lexicalForm, String languageTag, String datatype) + throws ParsingException, IllegalArgumentException { + if (languageTag != null && datatype != null) { + throw new IllegalArgumentException( + "A constant with a language tag may not explicitly specify a data type."); + } else if (languageTag != null) { + return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); + } else { + String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); + DatatypeConstantHandler handler = datatypes.get(type); + + if (handler != null) { + return handler.createConstant(lexicalForm); + } + + return Expressions.makeDatatypeConstant(lexicalForm, type); + } + } + + /** + * Register a new data type. + * + * @param name the IRI representing the data type. + * @param handler a {@link DatatypeConstantHandler} that parses a syntactic form + * into a {@link Constant}. + * + * @throws IllegalArgumentException when the data type name has already been + * registered. + * + * @return this + */ + public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) + throws IllegalArgumentException { + if (datatypes.containsKey(name)) { + throw new IllegalArgumentException("Data type \"" + name + "\" is already registered."); + } + + this.datatypes.put(name, handler); + return this; + } + /** * The registered data sources. */ private HashMap dataSources = new HashMap<>(); + + /** + * The registered datatypes. 
+ */ + private HashMap datatypes = new HashMap<>(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 129537296..c3ba4160d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.SyntaxObject; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.FormulaContext; @@ -46,18 +47,21 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); - public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { - final JavaCCParser parser = new JavaCCParser(stream, encoding); - parser.setKnowledgeBase(knowledgeBase); - parser.setParserConfiguration(parserConfiguration); - doParse(parser); - } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, + final ParserConfiguration parserConfiguration) throws ParsingException { + final JavaCCParser parser = new JavaCCParser(stream, encoding); + parser.setKnowledgeBase(knowledgeBase); + parser.setParserConfiguration(parserConfiguration); + doParse(parser); + } - public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, + final 
ParserConfiguration parserConfiguration) throws ParsingException { parseInto(knowledgeBase, stream, "UTF-8", parserConfiguration); } - public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, + final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); parseInto(knowledgeBase, inputStream, "UTF-8", parserConfiguration); } @@ -78,20 +82,23 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final String inp parseInto(knowledgeBase, inputStream, "UTF-8"); } - public static KnowledgeBase parse(final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration) throws ParsingException { - JavaCCParser parser = new JavaCCParser(stream, encoding); - parser.setParserConfiguration(parserConfiguration); - return doParse(parser); - } + public static KnowledgeBase parse(final InputStream stream, final String encoding, + final ParserConfiguration parserConfiguration) throws ParsingException { + JavaCCParser parser = new JavaCCParser(stream, encoding); + parser.setParserConfiguration(parserConfiguration); + return doParse(parser); + } - public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { - return parse(stream, "UTF-8", parserConfiguration); - } + public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parse(stream, "UTF-8", parserConfiguration); + } - public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, "UTF-8", 
parserConfiguration); - } + public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + return parse(inputStream, "UTF-8", parserConfiguration); + } public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { return doParse(new JavaCCParser(stream, encoding)); @@ -106,48 +113,79 @@ public static KnowledgeBase parse(final String input) throws ParsingException { return parse(inputStream, "UTF-8"); } - public static Rule parseRule(final String input) throws ParsingException { + /** + * Interface for a method parsing a {@link SyntaxObject}. + * + * This is needed to specify the exceptions thrown by the parse method. + */ + @FunctionalInterface + interface SyntaxObjectParser { + T parse(final JavaCCParser parser) throws ParseException, PrefixDeclarationException, TokenMgrError; + } + + /** + * Parse a {@link SyntaxObject}. + * + * @param input Input string. + * @param parserAction Parsing method for the {@code T}. + * @param syntaxObjectType Description of the type {@code T} being parsed. + * @param parserConfiguration {@link ParserConfiguration} instance, or null. + * + * @throws ParsingException when an error during parsing occurs. 
+ * @return an appropriate instance of {@code T} + */ + static T parseSyntaxObject(final String input, SyntaxObjectParser parserAction, + final String syntaxObjectType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + + if (parserConfiguration != null) { + localParser.setParserConfiguration(parserConfiguration); + } + try { - return localParser.rule(); + return parserAction.parse(localParser); } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing rule: {}!", input); - throw new ParsingException("Exception while parsing rule", e); + LOGGER.error("Exception while parsing " + syntaxObjectType + ": {}!", input); + throw new ParsingException("Exception while parsing " + syntaxObjectType, e); } } + public static Rule parseRule(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxObject(input, JavaCCParser::rule, "rule", parserConfiguration); + } + + public static Rule parseRule(final String input) throws ParsingException { + return parseRule(input, null); + } + + public static Literal parseLiteral(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxObject(input, parser -> parser.literal(FormulaContext.HEAD), "literal", parserConfiguration); + } + public static Literal parseLiteral(final String input) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - try { - return localParser.literal(FormulaContext.HEAD); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing literal: {}!", input); - throw new ParsingException("Exception while 
parsing literal", e); - } + return parseLiteral(input, null); + } + + public static PositiveLiteral parsePositiveLiteral(final String input, + final ParserConfiguration parserConfiguration) throws ParsingException { + return parseSyntaxObject(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", + parserConfiguration); } public static PositiveLiteral parsePositiveLiteral(final String input) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - try { - return localParser.positiveLiteral(FormulaContext.HEAD); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing positive literal: {}!", input); - throw new ParsingException("Exception while parsing positive literal", e); - } + return parsePositiveLiteral(input, null); + } + + public static Fact parseFact(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxObject(input, parser -> parser.fact(FormulaContext.HEAD), "fact", parserConfiguration); } public static Fact parseFact(final String input) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - try { - return localParser.fact(FormulaContext.HEAD); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing fact: {}!", input); - throw new ParsingException("Exception while parsing fact: {}!", e); - } + return parseFact(input, null); } static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 022a1815a..46e15b71b 
100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -36,6 +36,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -259,8 +260,8 @@ Term term(FormulaContext context) throws PrefixDeclarationException: String s; Constant c; } -{ //TODO move Expressions.makeConstant to JavaCCParserBase - s = IRI(false) { return Expressions.makeAbstractConstant(s); } +{ + s = IRI(false) { return createConstant(s); } | c = NumericLiteral() { return c; } | c = RDFLiteral() { return c; } | t = < UNIVAR > @@ -281,7 +282,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException: throw new ParseException("Existentialy quantified variables can not appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); return Expressions.makeExistentialVariable(s); } -| t = < VARORPREDNAME > { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(t.image));} +| t = < VARORPREDNAME > { return createConstant(t.image); } } /** [16] */ @@ -290,21 +291,20 @@ Constant NumericLiteral() : Token t; } { - t = < INTEGER > { return createIntegerConstant(t.image); } -| t = < DECIMAL > { return createDecimalConstant(t.image); } -| t = < DOUBLE > { return createDoubleConstant(t.image); } + t = < INTEGER > { return createConstant(t.image, PrefixDeclarations.XSD_INTEGER); } +| t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } +| t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } } Constant RDFLiteral() throws PrefixDeclarationException: { - Token t; String lex = null; String lang = null; // Optional lang tag and datatype. String dt = null; } { lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? 
- { return createDataConstant(lex, lang, dt); } + { return createConstant(lex, lang, dt); } } String Langtag() : diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index dcd01197c..7abc4ad4f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -23,6 +23,7 @@ import java.util.HashSet; import java.util.List; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -34,6 +35,7 @@ import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.core.model.api.Predicate; /** @@ -94,16 +96,33 @@ public JavaCCParserBase() { this.parserConfiguration = new DefaultParserConfiguration(); } - Constant createIntegerConstant(String lexicalForm) { - return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_INTEGER); + Constant createConstant(String lexicalForm) throws ParseException { + try { + return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(lexicalForm)); + } catch (PrefixDeclarationException e) { + throw new ParseException(e.getMessage()); + } } - Constant createDecimalConstant(String lexicalForm) { - return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_DECIMAL); + Constant createConstant(String lexicalForm, String datatype) throws ParseException { + return createConstant(lexicalForm, null, datatype); } - Constant createDoubleConstant(String 
lexicalForm) { - return Expressions.makeDatatypeConstant(lexicalForm, PrefixDeclarations.XSD_DOUBLE); + + /** + * Creates a suitable {@link Constant} from the parsed data. + * + * @param string the string data (unescaped) + * @param languageTag the language tag, or null if not present + * @param datatype the datatype, or null if not provided + * @return suitable constant + */ + Constant createConstant(String lexicalForm, String languageTag, String datatype) throws ParseException { + try { + return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); + } catch (ParsingException e) { + throw new ParseException(e.getMessage()); + } } void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { @@ -208,25 +227,6 @@ static String stripChars(String s, int n) { return s.substring(n, s.length()); } - /** - * Creates a suitable {@link Constant} from the parsed data. - * - * @param string the string data (unescaped) - * @param languageTag the language tag, or null if not present - * @param datatype the datatype, or null if not provided - * @return suitable constant - */ - Constant createDataConstant(String string, String languageTag, String datatype) { - // https://www.w3.org/TR/turtle/#grammar-production-String RDFLiteral - if (datatype != null) { - return new DatatypeConstantImpl(string, datatype); - } else if (languageTag != null) { - return new LanguageStringConstantImpl(string, languageTag); - } else { - return new DatatypeConstantImpl(string, "http://www.w3.org/2001/XMLSchema#string"); - } - } - /** * Reset the local set variables used when parsing a rule. 
*/ diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 6abc8647a..13a8d9cd5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,15 +19,19 @@ * limitations under the License. * #L% */ -import static org.junit.Assert.assertEquals; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.util.ArrayList; import java.util.Arrays; import org.junit.Test; +import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -36,6 +40,8 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -374,4 +380,19 @@ public void predicateAbsoluteIRITest() throws ParsingException { Fact f2 = Expressions.makeFact("a:b", a); assertEquals(f, f2); } + + @Test + public void testCustomDatatype() throws ParsingException { + final String typename = "http://example.org/#test"; + DatatypeConstant constant = Expressions.makeDatatypeConstant("test", typename); + DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class); + 
ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDatatype(typename, handler); + doReturn(constant).when(handler).createConstant(ArgumentMatchers.eq("hello, world")); + + String input = "p(\"hello, world\"^^<" + typename + ">) ."; + Literal literal = RuleParser.parseLiteral(input, parserConfiguration); + DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + assertEquals(constant, result); + } } From bcffd16f040bd47b0d7de603340a549d5b9037ff Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 Nov 2019 13:01:04 +0100 Subject: [PATCH 0682/1255] Parser: Force consumption of all input when parsing SyntaxObjects --- .../semanticweb/vlog4j/parser/RuleParser.java | 4 +++- .../vlog4j/parser/javacc/JavaCCParser.jj | 4 ++++ .../vlog4j/syntax/parser/RuleParserTest.java | 23 ++++++++++++++++++- 3 files changed, 29 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index c3ba4160d..0dfa67778 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -144,7 +144,9 @@ static T parseSyntaxObject(final String input, SyntaxOb } try { - return parserAction.parse(localParser); + T result = parserAction.parse(localParser); + localParser.ensureEndOfInput(); + return result; } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { LOGGER.error("Exception while parsing " + syntaxObjectType + ": {}!", input); throw new ParsingException("Exception while parsing " + syntaxObjectType, e); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 46e15b71b..8593d2421 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -49,6 +49,10 @@ public class JavaCCParser extends JavaCCParserBase private SubParserFactory getSubParserFactory() { return new SubParserFactory(this); } + + public void ensureEndOfInput() throws ParseException { + jj_consume_token(EOF); + } } PARSER_END(JavaCCParser) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 13a8d9cd5..11db42fd6 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -365,6 +365,27 @@ public void testBlankPredicateName() throws ParsingException { RuleParser.parse(input); } + @Test(expected = ParsingException.class) + public void testInvalidDatatypeOnLiteral() throws ParsingException { + final String input = "P(\"a\")^^whatever"; + RuleParser.parseLiteral(input); + } + + @Test(expected = ParsingException.class) + public void testNonIriTypeInDatatypeLiteral() throws ParsingException { + final String input = "P(\"a\"^^whatever)"; + RuleParser.parseLiteral(input); + } + + @Test + public void testIriTypeInDatatypeLiteral() throws ParsingException { + final String iri = "whatever"; + final String input = "P(\"a\"^^<" + iri + ">)"; + Literal literal = RuleParser.parseLiteral(input); + DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + assertEquals(iri, result.getDatatype()); + } + @Test public void predicateRelativeNumericIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); @@ -390,7 +411,7 @@ public void testCustomDatatype() throws ParsingException { parserConfiguration.registerDatatype(typename, handler); 
doReturn(constant).when(handler).createConstant(ArgumentMatchers.eq("hello, world")); - String input = "p(\"hello, world\"^^<" + typename + ">) ."; + String input = "p(\"hello, world\"^^<" + typename + ">)"; Literal literal = RuleParser.parseLiteral(input, parserConfiguration); DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(constant, result); From 9b11a9ff37ce71f81f4521dbdb366f924b5b9f15 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 25 Nov 2019 16:32:15 +0100 Subject: [PATCH 0683/1255] Parser: add helpers to parse DataSourceDeclarations and Terms --- .../semanticweb/vlog4j/parser/RuleParser.java | 76 +++++++++++++++---- .../parser/RuleParserDataSourceTest.java | 29 ++----- .../vlog4j/syntax/parser/RuleParserTest.java | 67 +++++++--------- 3 files changed, 97 insertions(+), 75 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 0dfa67778..5d942435a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -22,13 +22,16 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.SyntaxObject; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import 
org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.FormulaContext; @@ -114,28 +117,29 @@ public static KnowledgeBase parse(final String input) throws ParsingException { } /** - * Interface for a method parsing a {@link SyntaxObject}. + * Interface for a method parsing a fragment of the supported syntax. * * This is needed to specify the exceptions thrown by the parse method. */ @FunctionalInterface - interface SyntaxObjectParser { - T parse(final JavaCCParser parser) throws ParseException, PrefixDeclarationException, TokenMgrError; + interface SyntaxFragmentParser { + T parse(final JavaCCParser parser) + throws ParsingException, ParseException, PrefixDeclarationException, TokenMgrError; } /** - * Parse a {@link SyntaxObject}. + * Parse a syntax fragment. * * @param input Input string. * @param parserAction Parsing method for the {@code T}. - * @param syntaxObjectType Description of the type {@code T} being parsed. + * @param syntaxFragmentType Description of the type {@code T} being parsed. * @param parserConfiguration {@link ParserConfiguration} instance, or null. * * @throws ParsingException when an error during parsing occurs. 
* @return an appropriate instance of {@code T} */ - static T parseSyntaxObject(final String input, SyntaxObjectParser parserAction, - final String syntaxObjectType, final ParserConfiguration parserConfiguration) throws ParsingException { + static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, + final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); @@ -148,14 +152,14 @@ static T parseSyntaxObject(final String input, SyntaxOb localParser.ensureEndOfInput(); return result; } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing " + syntaxObjectType + ": {}!", input); - throw new ParsingException("Exception while parsing " + syntaxObjectType, e); + LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); + throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); } } public static Rule parseRule(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, JavaCCParser::rule, "rule", parserConfiguration); + return parseSyntaxFragment(input, JavaCCParser::rule, "rule", parserConfiguration); } public static Rule parseRule(final String input) throws ParsingException { @@ -164,7 +168,8 @@ public static Rule parseRule(final String input) throws ParsingException { public static Literal parseLiteral(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, parser -> parser.literal(FormulaContext.HEAD), "literal", parserConfiguration); + return parseSyntaxFragment(input, parser -> parser.literal(FormulaContext.HEAD), "literal", + parserConfiguration); } public static Literal parseLiteral(final String input) throws 
ParsingException { @@ -173,7 +178,7 @@ public static Literal parseLiteral(final String input) throws ParsingException { public static PositiveLiteral parsePositiveLiteral(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", + return parseSyntaxFragment(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", parserConfiguration); } @@ -183,13 +188,41 @@ public static PositiveLiteral parsePositiveLiteral(final String input) throws Pa public static Fact parseFact(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxObject(input, parser -> parser.fact(FormulaContext.HEAD), "fact", parserConfiguration); + return parseSyntaxFragment(input, parser -> parser.fact(FormulaContext.HEAD), "fact", parserConfiguration); } public static Fact parseFact(final String input) throws ParsingException { return parseFact(input, null); } + public static Term parseTerm(final String input, final FormulaContext context, + final ParserConfiguration parserConfiguration) throws ParsingException { + return parseSyntaxFragment(input, parser -> parser.term(context), "term", parserConfiguration); + } + + public static Term parseTerm(final String input, final ParserConfiguration parserConfiguration) + throws ParsingException { + return parseTerm(input, FormulaContext.HEAD, parserConfiguration); + } + + public static Term parseTerm(final String input, final FormulaContext context) throws ParsingException { + return parseTerm(input, context, null); + } + + public static Term parseTerm(final String input) throws ParsingException { + return parseTerm(input, (ParserConfiguration) null); + } + + public static DataSource parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxFragment(input, 
RuleParser::parseAndExtractDatasourceDeclaration, "data source declaration", + parserConfiguration); + } + + public static DataSource parseDataSourceDeclaration(final String input) throws ParsingException { + return parseDataSourceDeclaration(input, null); + } + static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { try { parser.parse(); @@ -200,4 +233,19 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException return parser.getKnowledgeBase(); } + protected static DataSource parseAndExtractDatasourceDeclaration(final JavaCCParser parser) + throws ParsingException, ParseException, PrefixDeclarationException { + parser.source(); + + final List dataSourceDeclarations = parser.getKnowledgeBase() + .getDataSourceDeclarations(); + + if (dataSourceDeclarations.size() != 1) { + throw new ParsingException( + "Unexpected number of data source declarations: " + dataSourceDeclarations.size()); + } + + return dataSourceDeclarations.get(0).getDataSource(); + } + } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index aee340a60..ff6029184 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -27,17 +27,11 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import 
org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -51,50 +45,41 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); - Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, csvds); - assertEquals(Arrays.asList(d), statements); + assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p(3) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); - Predicate p = Expressions.makePredicate("p", 3); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, rdfds); - assertEquals(Arrays.asList(d), statements); + assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testRdfSourceInvalidArity() throws ParsingException, IOException { String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RuleParser.parse(input); + RuleParser.parseDataSourceDeclaration(input); } @Test public void testSparqlSource() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - ArrayList 
statements = new ArrayList<>(RuleParser.parse(input).getStatements()); SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); - Predicate p = Expressions.makePredicate("p", 2); - DataSourceDeclaration d = new DataSourceDeclarationImpl(p, sparqlds); - assertEquals(Arrays.asList(d), statements); + assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - RuleParser.parse(input); + RuleParser.parseDataSourceDeclaration(input); } @Test(expected = ParsingException.class) public void testUnknownDataSource() throws ParsingException { String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; - RuleParser.parse(input); + RuleParser.parseDataSourceDeclaration(input); } @Test @@ -108,7 +93,7 @@ public void testCustomDataSource() throws ParsingException { String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList("hello", "world"); - RuleParser.parse(input, parserConfiguration); + RuleParser.parseDataSourceDeclaration(input, parserConfiguration); verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 11db42fd6..58bab4a65 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -186,11 +186,10 @@ public void testNoDollarVariables() throws ParsingException { @Test public void testIntegerLiteral() throws ParsingException { - 
String input = "p(42) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(42)"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); - assertEquals(Arrays.asList(integerLiteral), statements); + assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test @@ -204,90 +203,81 @@ public void testAbbreviatedIntegerLiteral() throws ParsingException { @Test public void testFullIntegerLiteral() throws ParsingException { - String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> ) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> )"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); - assertEquals(Arrays.asList(integerLiteral), statements); + assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testDecimalLiteral() throws ParsingException { - String input = "p(-5.0) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(-5.0)"; PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("-5.0", PrefixDeclarations.XSD_DECIMAL)); - assertEquals(Arrays.asList(decimalLiteral), statements); + assertEquals(decimalLiteral, RuleParser.parseLiteral(input)); } @Test public void testDoubleLiteral() throws ParsingException { - String input = "p(4.2E9) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(4.2E9)"; PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarations.XSD_DOUBLE)); - assertEquals(Arrays.asList(doubleLiteral), statements); + assertEquals(doubleLiteral, 
RuleParser.parseLiteral(input)); } @Test public void testStringLiteral() throws ParsingException { - String input = "p(\"abc\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + String input = "p(\"abc\")"; + assertEquals(fact2, RuleParser.parseLiteral(input)); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteral() throws ParsingException { - String input = "p(\"abc) ."; - RuleParser.parse(input); + String input = "p(\"abc)"; + RuleParser.parseLiteral(input); } @Test public void testStringLiteralEscapes() throws ParsingException { - String input = "p(\"_\\\"_\\\\_\\n_\\t_\") ."; // User input: p("_\"_\\_\n_\t_") - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void testStringLiteralAllEscapes() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") - String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\") ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void testStringLiteralMultiLine() throws ParsingException { - String input = "p('''line 1\n\n" + "line 2\n" + "line 3''') ."; // User input: p("a\"b\\c") - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + 
String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { - String input = "p('''abc\ndef'') ."; - RuleParser.parse(input); + String input = "p('''abc\ndef'')"; + RuleParser.parseLiteral(input); } @Test public void testFullLiteral() throws ParsingException { - String input = "p(\"abc\"^^) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + String input = "p(\"abc\"^^)"; + assertEquals(fact2, RuleParser.parseLiteral(input)); } @Test public void testUnicodeLiteral() throws ParsingException { - String input = "p(\"\\u0061\\u0062\\u0063\") ."; // "abc" - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + String input = "p(\"\\u0061\\u0062\\u0063\")"; // "abc" + assertEquals(fact2, RuleParser.parseLiteral(input)); } @Test @@ -306,11 +296,10 @@ public void testPrefixedLiteral() throws ParsingException { @Test public void testLangStringLiteral() throws ParsingException { - String input = "p(\"abc\"@en-gb) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + String input = "p(\"abc\"@en-gb)"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeLanguageStringConstant("abc", "en-gb")); - assertEquals(Arrays.asList(fact), statements); + assertEquals(fact, RuleParser.parseLiteral(input)); } @Test @@ -373,8 +362,8 @@ public void testInvalidDatatypeOnLiteral() throws ParsingException { @Test(expected = ParsingException.class) public void 
testNonIriTypeInDatatypeLiteral() throws ParsingException { - final String input = "P(\"a\"^^whatever)"; - RuleParser.parseLiteral(input); + final String input = "\"a\"^^whatever"; + RuleParser.parseTerm(input); } @Test From 14f12cc8a50380223ebd34677c8a2de7bb13d511 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 15:02:20 +0100 Subject: [PATCH 0684/1255] Require dots at the end of facts --- .../vlog4j/parser/javacc/JavaCCParser.jj | 4 ++-- .../vlog4j/syntax/parser/EntityTest.java | 8 ++++---- .../syntax/parser/RuleParserParseFactTest.java | 14 +++++++------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 8593d2421..3881f4bdf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -138,7 +138,7 @@ void statement() throws PrefixDeclarationException: } { LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} -| statement = fact(FormulaContext.HEAD) < DOT > //not from a rule +| statement = fact(FormulaContext.HEAD) //not from a rule { knowledgeBase.addStatement(statement); } @@ -217,7 +217,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: String predicateName; } { - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > { try { return Expressions.makeFact(predicateName, terms); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 8bd7cb008..c2fb1cfaa 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -139,10 +139,10 @@ public void predicateRoundTripTest3() throws ParsingException { @Test public void iriAngularBracketsTest() throws ParsingException { String constant = "a"; - Fact fact = RuleParser.parseFact("p(" + constant + ")"); + Fact fact = RuleParser.parseFact("p(" + constant + ")."); Term abstractConst = fact.getArguments().get(0); assertEquals(constant, abstractConst.toString()); - Fact fact2 = RuleParser.parseFact("p(<" + constant + ">)"); + Fact fact2 = RuleParser.parseFact("p(<" + constant + ">)."); Term abstractConst2 = fact2.getArguments().get(0); assertEquals(abstractConst, abstractConst2); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java index d6f423ee0..876b01ae8 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -40,28 +40,28 @@ public class RuleParserParseFactTest { @Test public void testFactArityOne() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\")"), factA); + assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); } @Test public void testFactArityOneWithDataType() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\")"), factA); + assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); } @Test public void testFactArityTwo() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\",\"b\")"), factAB); + assertEquals(RuleParser.parseFact("p(\"a\",\"b\") ."), factAB); } @Test(expected = ParsingException.class) public void testFactWithVariable() throws ParsingException { - String input = "p(?X)"; + String input = "p(?X) ."; RuleParser.parseFact(input); } @Test(expected = ParsingException.class) public void testZeroArityFact() throws ParsingException { - String input = "p()"; + String input = "p() ."; RuleParser.parseFact(input); } From e07bf1d7271ae72db09b21f7bb53ea12d03b7338 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 15:40:02 +0100 Subject: [PATCH 0685/1255] Parser: Use brackets in data source declarations for arity Implements #134. 
--- RELEASE-NOTES.md | 12 +- .../core/model/implementation/Serializer.java | 64 +++---- .../core/model/DataSourceDeclarationTest.java | 12 +- .../vlog4j/core/model/PredicateImplTest.java | 164 +++++++++--------- .../main/data/input/counting-triangles.rls | 3 +- vlog4j-examples/src/main/data/input/doid.rls | 15 +- .../examples/CompareWikidataDBpedia.java | 12 +- .../examples/core/AddDataFromCsvFile.java | 6 +- .../examples/core/AddDataFromRdfFile.java | 2 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 4 +- .../vlog4j/syntax/parser/EntityTest.java | 44 ----- .../parser/RuleParserDataSourceTest.java | 60 +++++-- 12 files changed, 198 insertions(+), 200 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 0819972b3..2e99a8640 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -10,6 +10,7 @@ Breaking changes: * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes * Methods to access terms now use Java Streams and are unified across syntactic objects +* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` New features: * New module vlog4j-client provides a stand-alone command line client jar for VLog4j @@ -31,17 +32,17 @@ VLog4j v0.4.0 ------------- Breaking changes: -* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) +* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) * The EdbIdbSeparation is obsolete and does no longer exist * IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier * A new interface Fact has replaced the overly general PositiveLiteral in many places New features: * New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java -* Input predicates can now be used with multiple sources and in rule heads (no more 
EDB-IDB distinction) +* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) * New InMemoryDataSource for efficient in-memory fact loading * New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner -* Modifications to the knowledge base are taken into account by the reasoner +* Modifications to the knowledge base are taken into account by the reasoner * New and updated example programs to illustrate use of syntax Other improvements: @@ -52,7 +53,7 @@ Other improvements: * Better code structure and testing Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now VLog4j v0.3.0 @@ -68,7 +69,7 @@ VLog4j v0.2.0 ------------- New features: -* supporting File data sources of N-Triples format (.nt file extension) +* supporting File data sources of N-Triples format (.nt file extension) * supporting g-zipped data source files (.csv.gz, .nt.gz) VLog4j v0.1.0 @@ -80,4 +81,3 @@ New features: * Essential data models for rules and facts, and essential reasoner functionality * support for reading from RDF files * support for converting rules from OWL ontology, loaded with the OWL API - diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ce1d58990..7726b506f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -44,7 +44,7 @@ /** * A utility class with static methods to obtain the correct parsable string * representation of the different data models. - * + * * @author Ali Elhalawati * */ @@ -55,8 +55,10 @@ public final class Serializer { public static final String EXISTENTIAL_IDENTIFIER = "!"; public static final String UNIVERSAL_IDENTIFIER = "?"; public static final String NAMEDNULL_IDENTIFIER = "_"; - public static final String OPEN_PARENTHESIS = "("; + public static final String OPENING_PARENTHESIS = "("; public static final String CLOSING_PARENTHESIS = ")"; + public static final String OPENING_BRACKET = "["; + public static final String CLOSING_BRACKET = "]"; public static final String RULE_SEPARATOR = " :- "; public static final String AT = "@"; public static final String DATA_SOURCE = "@source "; @@ -85,11 +87,11 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. - * + * */ public static String getString(final Rule rule) { return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; @@ -97,7 +99,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. 
@@ -118,7 +120,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. @@ -128,7 +130,7 @@ public static String getString(final Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPEN_PARENTHESIS); + stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPENING_PARENTHESIS); boolean first = true; for (final Term term : literal.getArguments()) { if (first) { @@ -145,7 +147,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. @@ -156,7 +158,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link Constant}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. @@ -168,7 +170,7 @@ public static String getString(final AbstractConstant constant) { /** * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
* @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given @@ -181,7 +183,7 @@ public static String getConstantName(final LanguageStringConstant languageString /** * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} without an IRI. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given @@ -204,7 +206,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { /** * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given @@ -217,7 +219,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given @@ -229,7 +231,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given @@ -241,7 +243,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. 
* @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. @@ -252,18 +254,18 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ public static String getString(final Predicate predicate) { - return predicate.getName() + OPEN_PARENTHESIS + predicate.getArity() + CLOSING_PARENTHESIS; + return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; } /** * Creates a String representation of a given {@link DataSourceDeclaration}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given @@ -276,44 +278,44 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki">. - * + * * @param csvFileDataSource * @return String representation corresponding to a given * {@link CsvFileDataSource}. */ public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; + return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; } /** * Creates a String representation of a given {@link RdfFileDataSource}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki">. - * - * + * + * * @param rdfFileDataSource * @return String representation corresponding to a given * {@link RdfFileDataSource}. 
*/ public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPEN_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; + return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; } /** * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. - * + * * @see <"https://github.com/knowsys/vlog4j/wiki">. - * - * + * + * * @param dataSource * @return String representation corresponding to a given * {@link SparqlQueryResultDataSource}. */ public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPEN_PARENTHESIS + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + CLOSING_PARENTHESIS; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index caf805b82..c3ebcd4fb 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -75,7 +75,7 @@ public void toString_SparqlQueryResultDataSource() throws IOException { new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 ."); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); - assertEquals("@source p(3): sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", + assertEquals("@source p[3]: sparql(, \"var\", \"?var wdt:P31 wd:Q5 .\") .", dataSourceDeclaration.toString()); } @@ -91,7 +91,7 @@ public void toString_CsvFileDataSource() throws IOException { unzippedCsvFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } // FIXME: have String representation of files OS independent @@ -103,7 +103,7 @@ public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throw final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); - assertEquals("@source q(1): load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); } @Test @@ -117,6 +117,6 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { unzippedRdfFileDataSource); final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q(1): load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java index 0c750ebb1..508f4c90d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java @@ -1,82 +1,82 @@ -package org.semanticweb.vlog4j.core.model; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; - -public class PredicateImplTest { - - @Test - public void testEquals() { - final Predicate p1 = new PredicateImpl("p", 1); - final Predicate p1too = Expressions.makePredicate("p", 1); - final Predicate p2 = new PredicateImpl("p", 2); - final Predicate q1 = new PredicateImpl("q", 1); - - assertEquals(p1, p1); - assertEquals(p1too, p1); - assertNotEquals(p2, p1); - assertNotEquals(q1, p1); - assertNotEquals(p2.hashCode(), p1.hashCode()); - assertNotEquals(q1.hashCode(), p1.hashCode()); - assertFalse(p1.equals(null)); // written like this for recording coverage properly - assertFalse(p1.equals("p")); // written like this for recording coverage properly - } - - @Test(expected = NullPointerException.class) - public void predicateNameNotNull() { - new PredicateImpl(null, 2); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotEmpty() { - new PredicateImpl("", 1); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotWhitespace() { - new PredicateImpl(" ", 1); - } - - @Test(expected = IllegalArgumentException.class) - public void arityNegative() { - new PredicateImpl("p", -1); - } - - @Test(expected = IllegalArgumentException.class) - public void arityZero() { - new PredicateImpl("p", 0); - } - - @Test - public void predicateToStringTest() { - final Predicate p1 = new PredicateImpl("p", 1); - assertEquals("p(1)", p1.toString()); - } - -} +package org.semanticweb.vlog4j.core.model; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; + +public class PredicateImplTest { + + @Test + public void testEquals() { + final Predicate p1 = new PredicateImpl("p", 1); + final Predicate p1too = Expressions.makePredicate("p", 1); + final Predicate p2 = new PredicateImpl("p", 2); + final Predicate q1 = new PredicateImpl("q", 1); + + assertEquals(p1, p1); + assertEquals(p1too, p1); + assertNotEquals(p2, p1); + assertNotEquals(q1, p1); + assertNotEquals(p2.hashCode(), p1.hashCode()); + assertNotEquals(q1.hashCode(), p1.hashCode()); + assertFalse(p1.equals(null)); // written like this for recording coverage properly + assertFalse(p1.equals("p")); // written like this for recording coverage properly + } + + @Test(expected = NullPointerException.class) + public void predicateNameNotNull() { + new PredicateImpl(null, 2); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotEmpty() { + new PredicateImpl("", 1); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotWhitespace() { + new PredicateImpl(" ", 1); + } + + @Test(expected = IllegalArgumentException.class) + public void 
arityNegative() { + new PredicateImpl("p", -1); + } + + @Test(expected = IllegalArgumentException.class) + public void arityZero() { + new PredicateImpl("p", 0); + } + + @Test + public void predicateToStringTest() { + final Predicate p1 = new PredicateImpl("p", 1); + assertEquals("p[1]", p1.toString()); + } + +} diff --git a/vlog4j-examples/src/main/data/input/counting-triangles.rls b/vlog4j-examples/src/main/data/input/counting-triangles.rls index 223c4a77a..90cf145bb 100644 --- a/vlog4j-examples/src/main/data/input/counting-triangles.rls +++ b/vlog4j-examples/src/main/data/input/counting-triangles.rls @@ -2,7 +2,7 @@ % From Wikidata, get all countries (items with P31 relation to Q6256, or subclasses thereof: P279*) % that border (P47) each other: -@source borders(2): sparql(wdqs:sparql, "country1,country2", +@source borders[2]: sparql(wdqs:sparql, "country1,country2", '''?country1 wdt:P31/wdt:P279* wd:Q6256 . ?country2 wdt:P31/wdt:P279* wd:Q6256 . ?country1 wdt:P47 ?country2 .''') . @@ -15,4 +15,3 @@ country(?X) :- shareBorder(?X, ?Y) . % Compute all triangles: triangle(?X,?Y,?Z) :- shareBorder(?X,?Y), shareBorder(?Y,?Z), shareBorder(?Z,?X) . - diff --git a/vlog4j-examples/src/main/data/input/doid.rls b/vlog4j-examples/src/main/data/input/doid.rls index 545febc5e..e50e4e3ef 100644 --- a/vlog4j-examples/src/main/data/input/doid.rls +++ b/vlog4j-examples/src/main/data/input/doid.rls @@ -1,24 +1,24 @@ @prefix rdfs: . @prefix wdqs: . -@source doidTriple(3): load-rdf("src/main/data/input/doid.nt.gz") . -@source diseaseId(2): sparql(wdqs:sparql, "disease,doid", "?disease wdt:P699 ?doid .") . -@source recentDeaths(1): sparql(wdqs:sparql, "human", +@source doidTriple[3]: load-rdf("src/main/data/input/doid.nt.gz") . +@source diseaseId[2]: sparql(wdqs:sparql, "disease,doid", "?disease wdt:P699 ?doid .") . +@source recentDeaths[1]: sparql(wdqs:sparql, "human", '''?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)''') . 
-@source recentDeathsCause(2): sparql(wdqs:sparql, "human,causeOfDeath", +@source recentDeathsCause[2]: sparql(wdqs:sparql, "human,causeOfDeath", '''?human wdt:P31 wd:Q5; wdt:P570 ?deathDate ; - wdt:P509 ?causeOfDeath . + wdt:P509 ?causeOfDeath . FILTER (YEAR(?deathDate) = 2018)''') . % Combine recent death data (infer "unknown" cause if no cause given): deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) . deathCause(?X, !Z) :- recentDeaths(?X) . -% Mark Wikidata diseases that have a DOID: -hasDoid(?X) :- diseaseId(?X, ?DoidId) . +% Mark Wikidata diseases that have a DOID: +hasDoid(?X) :- diseaseId(?X, ?DoidId) . % Relate DOID string ID (used on Wikidata) to DOID IRI (used in DOID ontology) doid(?Iri, ?DoidId) :- doidTriple(?Iri, ,?DoidId) . @@ -34,4 +34,3 @@ cancerDisease(?Xdoid) :- diseaseHierarchy(?X, ?Y), doid(?Y, "DOID:162"), doid(?X humansWhoDiedOfCancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), cancerDisease(?Z) . humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), ~cancerDisease(?Z) . humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), ~hasDoid(?Y) . - diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 7e1031f42..f3b3129c2 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -36,10 +36,10 @@ * over. For a fair comparison, we restrict to Wikidata entities that have a * related English Wikipedia page (others cannot be in English DBpedia in the * first place). - * + * * The example query used asks for alumni of the University of Leipzig (one of * the oldest European universities). - * + * * @author Markus Kroetzsch * */ @@ -72,8 +72,8 @@ public static void main(final String[] args) throws ParsingException, IOExceptio final String rules = "" // + "@prefix wdqs: ." // + "@prefix dbp: ." // - + "@source dbpResult(2) : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // - + "@source wdResult(2) : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // + + "@source wdResult[2]) : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + "% Rules:\n" // + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)." 
// diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java index b68db527b..d493a3eb5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java @@ -56,11 +56,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio ExamplesUtils.configureLogging(); final String initialFactsHasPart = ""// a file input: - + "@source hasPart(2) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ."; + + "@source hasPart[2] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ."; final String rules = "" // first declare file inputs: - + "@source bicycle(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." - + "@source wheel(1) : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." + + "@source bicycle[1] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." + + "@source wheel[1] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." // every bicycle has some part that is a wheel: + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // every wheel is part of some bicycle: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java index a00a008d1..3a2cd3f8f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java @@ -67,7 +67,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio + "@prefix ex: ." + "@prefix rdf: ." 
// specify data sources: - + "@source triple(3) : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ." + + "@source triple[3] : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ." // every bicycle has some part that is a wheel: + "triple(?S, ex:hasPart, !X), triple(!X, rdf:type, ex:wheel) :- triple(?S, rdf:type, ex:bicycle) ." // every wheel is part of some bicycle: diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 3881f4bdf..89f542b82 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -103,7 +103,7 @@ void source() throws PrefixDeclarationException: Token arity; } { - < SOURCE > predicateName = predicateName() < LPAREN > arity = < INTEGER > < RPAREN > < COLON > dataSource = dataSource() < DOT > + < SOURCE > predicateName = predicateName() < LBRACK > arity = < INTEGER > < RBRACK > < COLON > dataSource = dataSource() < DOT > { int nArity; nArity = Integer.parseInt(arity.image); @@ -518,6 +518,8 @@ TOKEN : { < LPAREN : "(" > | < RPAREN : ")" > +| < LBRACK : "[" > +| < RBRACK : "]" > | < COMMA : "," > | < DOT : "." 
> | < ARROW : ":-" > diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index c2fb1cfaa..3ca6b90d2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -21,14 +21,9 @@ */ import static org.junit.Assert.assertEquals; -import java.io.File; -import java.io.IOException; -import java.net.URL; - import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; @@ -38,14 +33,9 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -239,38 +229,4 @@ public void datatypeDecimalToStringRoundTripTest() throws ParsingException { assertEquals(shortDecimalConstant, RuleParser.parseFact("p(" + 
shortDecimalConstant + ").").getArguments().get(0).toString()); } - - @Test - public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("p", 3); - SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), - "var", "?var wdt:P31 wd:Q5 ."); - DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); - RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); - } - - @Test - public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); - DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, - unzippedRdfFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); - assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); - } - - @Test - public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File("src/test/data/input/file.csv")); - final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, - unzippedCsvFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); - assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); - } - } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index ff6029184..16011fa0f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -19,9 +19,11 @@ * limitations under the License. * #L% */ - -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import java.io.File; import java.io.IOException; @@ -32,6 +34,11 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -44,27 +51,27 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { - String input = "@source p(2) : load-csv(\"src/main/data/input/example.csv\") ."; + String input = "@source p[2] : load-csv(\"src/main/data/input/example.csv\") ."; CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); } @Test public void testRdfSource() throws ParsingException, IOException { - String input = "@source p(3) : 
load-rdf(\"src/main/data/input/example.nt.gz\") ."; + String input = "@source p[3] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testRdfSourceInvalidArity() throws ParsingException, IOException { - String input = "@source p(2) : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + String input = "@source p[2] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; RuleParser.parseDataSourceDeclaration(input); } @Test public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); @@ -72,13 +79,13 @@ public void testSparqlSource() throws ParsingException, MalformedURLException { @Test(expected = ParsingException.class) public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { - String input = "@source p(2) : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; RuleParser.parseDataSourceDeclaration(input); } @Test(expected = ParsingException.class) public void testUnknownDataSource() throws ParsingException { - String input = "@source p(2) : unknown-data-source(\"hello, world\") ."; + String input = "@source p[2] : unknown-data-source(\"hello, world\") ."; RuleParser.parseDataSourceDeclaration(input); } @@ -91,10 +98,43 @@ public void testCustomDataSource() throws ParsingException { 
doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), ArgumentMatchers.any()); - String input = "@source p(2) : mock-source(\"hello\", \"world\") ."; + String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList("hello", "world"); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); } + + @Test + public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("p", 3); + SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), + "var", "?var wdt:P31 wd:Q5 ."); + DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); + RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); + assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("p", 3); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedRdfFileDataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("q", 1); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File("src/test/data/input/file.csv")); + final 
DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedCsvFileDataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } } From e4fcf2504eb1de93f7d887ba2a22b3d780b2d943 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 15:54:28 +0100 Subject: [PATCH 0686/1255] Use openjdk8 on xenial --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index fde2269c2..a5985bb38 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,6 +17,8 @@ matrix: - g++-6 - libstdc++6 env: CC=gcc-6 CXX=g++-6 + jdk: + - openjdk8 - os: osx osx_image: xcode10.2 From 7f02a16c5c9774716070cb98aac473b8d56c8f56 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 16:13:42 +0100 Subject: [PATCH 0687/1255] Submit coverage only for the main build --- .travis.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index a5985bb38..908b769d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,8 @@ matrix: dist: bionic jdk: - openjdk11 + after_success: + - mvn clean test jacoco:report coveralls:report - os: linux dist: xenial @@ -35,9 +37,6 @@ jobs: install: mvn install $OPTIONS -DskipTests=true -after_success: - - mvn clean test jacoco:report coveralls:report - sudo: false cache: From 315e3fd800ed69c296cb4764004cf1f80eff1bb1 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 16:37:04 +0100 Subject: [PATCH 0688/1255] Add note on OS compatibility --- README.md | 8 +++++--- RELEASE-NOTES.md | 2 ++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 19d2290a2..e11ad17f7 100644 --- a/README.md +++ b/README.md @@ -28,11 +28,13 @@ You need to use Java 1.8 or above. 
Available modules include: * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API * **vlog4j-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/vlog4j/wiki/Standalone-client) for VLog4j. -The released packages use vlog4j-base, which packages system-dependent binaries for Linux, MacOS, and Windows, and should work out of the box with current versions of these systems. In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use vlog4j-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * Run [build-vlog-library.sh](https://github.com/knowsys/vlog4j/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./vlog4j-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog4j-base. * Run ```mvn install``` to test if the setup works + + Documentation ------------- @@ -47,5 +49,5 @@ Development * Pull requests are welcome. * The master branch may require a development version of VLog. Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). -* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. 
-* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. +* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. +* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. \ No newline at end of file diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 2e99a8640..d3c2ed11d 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -15,6 +15,7 @@ Breaking changes: New features: * New module vlog4j-client provides a stand-alone command line client jar for VLog4j * A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki +* The parser behaviour for data source declarations and certain datatype literals can be customised. Other improvements: * Data model is better aligned with syntax supported by parser @@ -27,6 +28,7 @@ Bugfixes: * Acyclicity checks work again without calling reason() first (issue #128) * in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) * in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) +* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. 
VLog4j v0.4.0 ------------- From 609d70a332024243c31e3cc29213814f47a72200 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Dec 2019 19:58:34 +0100 Subject: [PATCH 0689/1255] Parser: Address review comments --- vlog4j-parser/pom.xml | 182 +++++++++--------- .../parser/DataSourceDeclarationHandler.java | 2 +- .../parser/DatatypeConstantHandler.java | 4 +- .../vlog4j/parser/ParserConfiguration.java | 37 ++-- .../semanticweb/vlog4j/parser/RuleParser.java | 25 +-- .../CsvFileDataSourceDeclarationHandler.java | 2 +- .../RdfFileDataSourceDeclarationHandler.java | 2 +- ...eryResultDataSourceDeclarationHandler.java | 13 +- .../parser/javacc/JavaCCParserBase.java | 21 +- .../parser/RuleParserDataSourceTest.java | 24 ++- 10 files changed, 158 insertions(+), 154 deletions(-) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index 6e3773633..f05d6ef5a 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -24,95 +24,97 @@ vlog4j-core ${project.version} - + - - - - org.codehaus.mojo - javacc-maven-plugin - 2.6 - - - ruleparser - - ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/javacc/ - - - javacc - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.8 - - - generate-sources - - - ${project.build.directory}/generated-sources/javacc/ - - - - add-source - - - - - - - - - - org.eclipse.m2e - lifecycle-mapping - 1.0.0 - - - - - - org.codehaus.mojo - javacc-maven-plugin - [2.6,) - - javacc - - - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - [1.0,) - - parse-version - add-source - maven-version - add-resource - add-test-resource - add-test-source - - - - - true - true - - - - - - - - - - + + + + org.codehaus.mojo + javacc-maven-plugin + 2.6 + + + ruleparser + + ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/javacc/ + + + javacc + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 1.8 + + + generate-sources + + + ${project.build.directory}/generated-sources/javacc/ + + + + add-source + + + + + + + + + + 
org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.codehaus.mojo + javacc-maven-plugin + [2.6,) + + javacc + + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + [1.0,) + + parse-version + add-source + maven-version + add-resource + add-test-resource + add-test-source + + + + + true + true + + + + + + + + + + diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index d1f8766b1..c191ffa76 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -55,7 +55,7 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto * @throws ParsingException when the given number of Arguments is invalid for * the Data Source. */ - static void verifyCorrectNumberOfArguments(List arguments, int number) throws ParsingException { + static void validateNumberOfArguments(List arguments, int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException("Invalid number of arguments " + arguments.size() + " for Data Source declaration, expected " + number); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java index 7f8ffc3e7..c584b876a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java @@ -23,14 +23,14 @@ import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; /** - * Handler for parsing a custom Data Source declaration. + * Handler for parsing a custom Datatype constant. 
* * @author Maximilian Marx */ @FunctionalInterface public interface DatatypeConstantHandler { /** - * Parse a Data Source Declaration. + * Parse a datatype constant. * * @param lexicalForm lexical representation of the constant. * diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 1aec212b0..4d75fae0a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -23,6 +23,7 @@ import java.util.HashMap; import java.util.List; +import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -35,6 +36,16 @@ * @author Maximilian Marx */ public class ParserConfiguration { + /** + * The registered data sources. + */ + private HashMap dataSources = new HashMap<>(); + + /** + * The registered datatypes. + */ + private HashMap datatypes = new HashMap<>(); + /** * Register a new Data Source. 
* @@ -47,9 +58,7 @@ public class ParserConfiguration { */ public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) throws IllegalArgumentException { - if (dataSources.containsKey(name)) { - throw new IllegalArgumentException("Data source \"" + name + "\" is already registered."); - } + Validate.isTrue(!dataSources.containsKey(name), "The Data Source \"%s\" is already registered.", name); this.dataSources.put(name, handler); return this; @@ -96,10 +105,10 @@ public DataSource parseDataSourceDeclaration(String name, List args, */ public Constant parseConstant(String lexicalForm, String languageTag, String datatype) throws ParsingException, IllegalArgumentException { - if (languageTag != null && datatype != null) { - throw new IllegalArgumentException( - "A constant with a language tag may not explicitly specify a data type."); - } else if (languageTag != null) { + Validate.isTrue(languageTag == null || datatype == null, + "A constant with a language tag may not explicitly specify a data type."); + + if (languageTag != null) { return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); } else { String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); @@ -127,21 +136,9 @@ public Constant parseConstant(String lexicalForm, String languageTag, String dat */ public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) throws IllegalArgumentException { - if (datatypes.containsKey(name)) { - throw new IllegalArgumentException("Data type \"" + name + "\" is already registered."); - } + Validate.isTrue(!dataSources.containsKey(name), "The Data type \"%s\" is already registered.", name); this.datatypes.put(name, handler); return this; } - - /** - * The registered data sources. - */ - private HashMap dataSources = new HashMap<>(); - - /** - * The registered datatypes. 
- */ - private HashMap datatypes = new HashMap<>(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 5d942435a..4c95f04b5 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -48,6 +48,8 @@ */ public class RuleParser { + private static final String DEFAULT_STRING_ENCODING = "UTF-8"; + private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, @@ -60,13 +62,13 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { - parseInto(knowledgeBase, stream, "UTF-8", parserConfiguration); + parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, "UTF-8", parserConfiguration); + parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding) @@ -77,12 +79,12 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea } public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream) throws ParsingException { - parseInto(knowledgeBase, stream, "UTF-8"); + parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING); } public static void parseInto(final 
KnowledgeBase knowledgeBase, final String input) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, "UTF-8"); + parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING); } public static KnowledgeBase parse(final InputStream stream, final String encoding, @@ -94,13 +96,13 @@ public static KnowledgeBase parse(final InputStream stream, final String encodin public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { - return parse(stream, "UTF-8", parserConfiguration); + return parse(stream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, "UTF-8", parserConfiguration); + return parse(inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); } public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { @@ -108,12 +110,12 @@ public static KnowledgeBase parse(final InputStream stream, final String encodin } public static KnowledgeBase parse(final InputStream stream) throws ParsingException { - return parse(stream, "UTF-8"); + return parse(stream, DEFAULT_STRING_ENCODING); } public static KnowledgeBase parse(final String input) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, "UTF-8"); + return parse(inputStream, DEFAULT_STRING_ENCODING); } /** @@ -141,20 +143,21 @@ T parse(final JavaCCParser parser) static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new 
ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + final JavaCCParser localParser = new JavaCCParser(inputStream, DEFAULT_STRING_ENCODING); if (parserConfiguration != null) { localParser.setParserConfiguration(parserConfiguration); } + T result; try { - T result = parserAction.parse(localParser); + result = parserAction.parse(localParser); localParser.ensureEndOfInput(); - return result; } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); } + return result; } public static Rule parseRule(final String input, final ParserConfiguration parserConfiguration) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index dc640dff5..cf585e7e6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -39,7 +39,7 @@ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclaratio @Override public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 29714b972..475f66d59 
100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -39,7 +39,7 @@ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclaratio @Override public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 1); + DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 1faff9341..ebfacabf3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -45,23 +45,22 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource @Override public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.verifyCorrectNumberOfArguments(arguments, 3); + DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); String endpoint = arguments.get(0); + URL endpointUrl; try { JavaCCParser parser = subParserFactory.makeSubParser(endpoint); - endpoint = parser.IRI(false); + endpointUrl = new URL(parser.IRI(false)); } catch (ParseException | PrefixDeclarationException e) { throw new ParsingException(e); + } catch (MalformedURLException e) { + throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: 
" + e.getMessage(), e); } String variables = arguments.get(1); String query = arguments.get(2); - try { - return new SparqlQueryResultDataSource(new URL(endpoint), variables, query); - } catch (MalformedURLException e) { - throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); - } + return new SparqlQueryResultDataSource(endpointUrl, variables, query); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7abc4ad4f..b254665a0 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -21,22 +21,19 @@ */ import java.util.HashSet; -import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.core.model.api.Predicate; /** * Basic methods used in the JavaCC-generated parser. 
@@ -53,23 +50,23 @@ * */ public class JavaCCParserBase { - PrefixDeclarations prefixDeclarations; + protected PrefixDeclarations prefixDeclarations; - KnowledgeBase knowledgeBase; - ParserConfiguration parserConfiguration; + protected KnowledgeBase knowledgeBase; + protected ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. */ - final HashSet bodyVars = new HashSet(); + protected final HashSet bodyVars = new HashSet(); /** * "Local" variable to remember existential head variables during parsing. */ - final HashSet headExiVars = new HashSet();; + protected final HashSet headExiVars = new HashSet();; /** * "Local" variable to remember universal head variables during parsing. */ - final HashSet headUniVars = new HashSet();; + protected final HashSet headUniVars = new HashSet();; /** * Defines the context for parsing sub-formulas. @@ -121,7 +118,9 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) try { return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); } catch (ParsingException e) { - throw new ParseException(e.getMessage()); + ParseException parseException = new ParseException(e.getMessage()); + parseException.initCause(e); + throw parseException; } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 16011fa0f..b345063b6 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -49,31 +49,35 @@ import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; public class RuleParserDataSourceTest { + private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; + private static final String 
EXAMPLE_CSV_FILE_PATH = "src/main/data/input/example.csv"; + private static final String WIKIDATA_SPARQL_ENDPOINT_URI = "https://query.wikidata.org/sparql"; + @Test public void testCsvSource() throws ParsingException, IOException { - String input = "@source p[2] : load-csv(\"src/main/data/input/example.csv\") ."; - CsvFileDataSource csvds = new CsvFileDataSource(new File("src/main/data/input/example.csv")); + String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; + CsvFileDataSource csvds = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); } @Test public void testRdfSource() throws ParsingException, IOException { - String input = "@source p[3] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; - RdfFileDataSource rdfds = new RdfFileDataSource(new File("src/main/data/input/example.nt.gz")); + String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; + RdfFileDataSource rdfds = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); } @Test(expected = ParsingException.class) public void testRdfSourceInvalidArity() throws ParsingException, IOException { - String input = "@source p[2] : load-rdf(\"src/main/data/input/example.nt.gz\") ."; + String input = "@source p[2] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; RuleParser.parseDataSourceDeclaration(input); } @Test public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + String input = "@source p[2] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( - new URL("https://query.wikidata.org/sparql"), "disease, doid", "?disease wdt:P699 ?doid ."); + new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "disease, 
doid", "?disease wdt:P699 ?doid ."); assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); } @@ -109,7 +113,7 @@ public void testCustomDataSource() throws ParsingException { public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("p", 3); - SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL("https://example.org/sparql"), + SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "var", "?var wdt:P31 wd:Q5 ."); DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); @@ -120,7 +124,7 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File("src/test/data/input/file.nt")); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -131,7 +135,7 @@ public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingExceptio public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File("src/test/data/input/file.csv")); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new 
File(EXAMPLE_CSV_FILE_PATH)); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); From 4beb0baa277fe3f4ba1a223468a0f8129b79c74e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:39:19 +0100 Subject: [PATCH 0690/1255] Parser: Don't exclude non-generated JavaCCParserBase from coverage --- pom.xml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 07c9e2e46..8e831b96f 100644 --- a/pom.xml +++ b/pom.xml @@ -290,14 +290,14 @@ - **/javacc/JavaCCParser* - **/javacc/JavaCCParserConstants* - **/javacc/JavaCCParserTokenManager* - **/javacc/JavaCharStream* - **/javacc/ParseException* - **/javacc/SimpleCharStream* - **/javacc/Token* - **/javacc/TokenMgrError* + **/javacc/JavaCCParser.class + **/javacc/JavaCCParserConstants.class + **/javacc/JavaCCParserTokenManager.class + **/javacc/JavaCharStream.class + **/javacc/ParseException.class + **/javacc/SimpleCharStream.class + **/javacc/Token.class + **/javacc/TokenMgrError.class From a13f7884fb44afdabf08177274d9763ef9ca9eae Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:42:16 +0100 Subject: [PATCH 0691/1255] Parser: Return DataSourceDeclaration from parseDataSourceDeclaration --- .../org/semanticweb/vlog4j/parser/RuleParser.java | 8 ++++---- .../syntax/parser/RuleParserDataSourceTest.java | 15 ++++++++------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 4c95f04b5..50e18558f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -216,13 +216,13 @@ public static Term parseTerm(final String input) throws ParsingException 
{ return parseTerm(input, (ParserConfiguration) null); } - public static DataSource parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) + public static DataSourceDeclaration parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) throws ParsingException { return parseSyntaxFragment(input, RuleParser::parseAndExtractDatasourceDeclaration, "data source declaration", parserConfiguration); } - public static DataSource parseDataSourceDeclaration(final String input) throws ParsingException { + public static DataSourceDeclaration parseDataSourceDeclaration(final String input) throws ParsingException { return parseDataSourceDeclaration(input, null); } @@ -236,7 +236,7 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException return parser.getKnowledgeBase(); } - protected static DataSource parseAndExtractDatasourceDeclaration(final JavaCCParser parser) + protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) throws ParsingException, ParseException, PrefixDeclarationException { parser.source(); @@ -248,7 +248,7 @@ protected static DataSource parseAndExtractDatasourceDeclaration(final JavaCCPar "Unexpected number of data source declarations: " + dataSourceDeclarations.size()); } - return dataSourceDeclarations.get(0).getDataSource(); + return dataSourceDeclarations.get(0); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index b345063b6..90c7074a1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -55,16 +55,16 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, 
IOException { - String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; + String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; CsvFileDataSource csvds = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); - assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input)); + assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; RdfFileDataSource rdfds = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); - assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input)); + assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test(expected = ParsingException.class) @@ -75,10 +75,11 @@ public void testRdfSourceInvalidArity() throws ParsingException, IOException { @Test public void testSparqlSource() throws ParsingException, MalformedURLException { - String input = "@source p[2] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; - SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource( - new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "disease, doid", "?disease wdt:P699 ?doid ."); - assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input)); + String input = "@source p[2] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), + "disease, doid", "?disease wdt:P699 ?doid ."); + assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test(expected = ParsingException.class) From 81cfb25bdcd91aaf189c1954515687014a02eb29 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:43:11 +0100 Subject: [PATCH 0692/1255] Parser: Always keep 
cause when converting to/from ParseExceptions --- .../vlog4j/parser/javacc/JavaCCParser.jj | 6 ++--- .../parser/javacc/JavaCCParserBase.java | 25 +++++++++++++++---- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 89f542b82..cd1f0c045 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -108,8 +108,6 @@ void source() throws PrefixDeclarationException: int nArity; nArity = Integer.parseInt(arity.image); // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! - if ( dataSource instanceof RdfFileDataSource && nArity != 3 ) - throw new ParseException("Cannot load RDF data into predicate of arity " + nArity +"."); addDataSource(predicateName, nArity, dataSource); } @@ -126,7 +124,7 @@ DataSource dataSource() throws PrefixDeclarationException: try { return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { - throw new ParseException(e.getMessage()); + throw makeParseExceptionWithCause(e); } } } @@ -222,7 +220,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: try { return Expressions.makeFact(predicateName, terms); } catch (IllegalArgumentException e) { - throw new ParseException("Error parsing fact: " + e.toString()); + throw makeParseExceptionWithCause("Error parsing fact: " + e.getMessage(), e); } } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index b254665a0..08d9ab261 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -97,7 +97,7 @@ Constant createConstant(String lexicalForm) throws ParseException { try { return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(lexicalForm)); } catch (PrefixDeclarationException e) { - throw new ParseException(e.getMessage()); + throw makeParseExceptionWithCause(e); } } @@ -105,7 +105,6 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti return createConstant(lexicalForm, null, datatype); } - /** * Creates a suitable {@link Constant} from the parsed data. * @@ -118,9 +117,7 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) try { return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); } catch (ParsingException e) { - ParseException parseException = new ParseException(e.getMessage()); - parseException.initCause(e); - throw parseException; + throw makeParseExceptionWithCause(e); } } @@ -235,6 +232,24 @@ void resetVariableSets() { this.headUniVars.clear(); } + /** + * Convert a throwable into a ParseException. + * + * @param message The error message. + * @param cause The {@link Throwable} that caused this exception. + * + * @return A {@link ParseException} with appropriate cause and message. 
+ */ + protected ParseException makeParseExceptionWithCause(String message, Throwable cause) { + ParseException parseException = new ParseException(message); + parseException.initCause(cause); + return parseException; + } + + protected ParseException makeParseExceptionWithCause(Throwable cause) { + return makeParseExceptionWithCause(cause.getMessage(), cause); + } + public void setKnowledgeBase(KnowledgeBase knowledgeBase) { this.knowledgeBase = knowledgeBase; } From 97b35f5c6bfc5ca68fc70afcdef219765d6fa98a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:44:11 +0100 Subject: [PATCH 0693/1255] Parser: Fix checking for duplicate datatype handlers --- .../vlog4j/parser/ParserConfiguration.java | 2 +- .../parser/ParserConfigurationTest.java | 73 +++++++++++++++++++ 2 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 4d75fae0a..695decb21 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -136,7 +136,7 @@ public Constant parseConstant(String lexicalForm, String languageTag, String dat */ public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) throws IllegalArgumentException { - Validate.isTrue(!dataSources.containsKey(name), "The Data type \"%s\" is already registered.", name); + Validate.isTrue(!datatypes.containsKey(name), "The Data type \"%s\" is already registered.", name); this.datatypes.put(name, handler); return this; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java new file mode 100644 index 000000000..d509fe7f4 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java @@ -0,0 +1,73 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.mockito.Mockito.*; + +import org.junit.Test; +import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; + +public class ParserConfigurationTest { + private static final String TYPE_NAME = "test-type"; + private static final String SOURCE_NAME = "test-source"; + + private final DatatypeConstantHandler datatypeConstantHandler = mock(DatatypeConstantHandler.class); + private final DataSourceDeclarationHandler dataSourceDeclarationHandler = mock(DataSourceDeclarationHandler.class); + + @Test(expected = IllegalArgumentException.class) + public void registerDataSource_duplicateName_throws() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + + parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler) + .registerDataSource(SOURCE_NAME, 
dataSourceDeclarationHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDatatype_duplicateName_throws() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDatatype(TYPE_NAME, + datatypeConstantHandler); + } + + @Test + public void registerDataSource_datatypeName_succeeds() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDataSource(TYPE_NAME, + dataSourceDeclarationHandler); + } + + @Test + public void registerDatatype_dataSourceName_succeeds() { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler).registerDatatype(SOURCE_NAME, + datatypeConstantHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void parseConstant_languageTagWithExplictDatatype_throws() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.parseConstant("test", "test", "test"); + } + +} From db02e1b4f39acdb3093932d9d1a3b9c3192b3406 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 17:58:16 +0100 Subject: [PATCH 0694/1255] Parser: Expand javadoc w.r.t. 
DATASOURCE/DataSource type --- .../vlog4j/parser/DataSourceDeclarationHandler.java | 5 ++++- .../vlog4j/parser/ParserConfiguration.java | 12 +++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index c191ffa76..ff2907ca9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -35,13 +35,16 @@ public interface DataSourceDeclarationHandler { /** * Parse a Data Source Declaration. * + * This is called by the parser to instantiate the {@link DataSource} + * component of a {@link DataSourceDeclaration}. + * * @param arguments Arguments given to the Data Source declaration. * @param subParserFactory a factory for obtaining a SubParser, sharing the * parser's state, but bound to new input. * * @throws ParsingException when the given arity or arguments are invalid for * the Data Source. - * @return a @{link DataSource} instance corresponding to the given arguments. + * @return a {@link DataSource} instance corresponding to the given arguments. */ public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 695decb21..c1639a430 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -47,7 +47,14 @@ public class ParserConfiguration { private HashMap datatypes = new HashMap<>(); /** - * Register a new Data Source. + * Register a new (type of) Data Source. 
+ * + * This registers a handler for some custom value of the {@code DATASOURCE} + * production of the rules grammar, corresponding to some {@link DataSource} + * type. + * + * @see <"https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar"> for the + * grammar. * * @param name Name of the data source, as it appears in the declaring * directive. @@ -67,6 +74,9 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration /** * Parse a Data Source declaration. * + * This is called by the parser to construct a {@link DataSourceDeclaration}. It + * is responsible for instantiating an appropriate {@link DataSource} type. + * * @param name Name of the data source. * @param args arguments given in the data source declaration. * @param subParserFactory a {@link SubParserFactory} instance that creates From f21c7cbce904a39801f174eeafb31605c62c3513 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:07:39 +0100 Subject: [PATCH 0695/1255] Parser: Verify arity matches number of variables in SPARQL sources --- .../SparqlQueryResultDataSource.java | 15 ++++++++++----- .../syntax/parser/RuleParserDataSourceTest.java | 8 +++++--- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index 8eb8168b6..cadeeae79 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,6 +23,7 @@ import java.net.URL; import java.util.Iterator; import java.util.LinkedHashSet; +import java.util.Optional; import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; @@ -32,7 +33,7 @@ /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a * given web endpoint. - * + * * @author Irina Dragoste * */ @@ -46,7 +47,7 @@ public class SparqlQueryResultDataSource extends VLogDataSource { /** * Creates a data source from answers to a remote SPARQL query. - * + * * @param endpoint web location of the resource the query will be * evaluated on * @param queryVariables comma-separated list of SPARQL variable names (without @@ -69,7 +70,7 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl /** * Creates a data source from answers to a remote SPARQL query. - * + * * @param endpoint the web location of the resource the query will be * evaluated on. * @param queryVariables the variables of the query, in the given order. 
The @@ -135,6 +136,10 @@ static String getQueryVariablesList(LinkedHashSet queryVariables) { return sb.toString(); } + public Optional getRequiredArity() { + return Optional.of(this.queryVariables.split(",").length); + } + @Override public int hashCode() { final int prime = 31; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 90c7074a1..509d752e2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -83,8 +83,10 @@ public void testSparqlSource() throws ParsingException, MalformedURLException { } @Test(expected = ParsingException.class) - public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { - String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + public void parseDataSourceDeclaration_sparqlSourceInvalidArity_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; RuleParser.parseDataSourceDeclaration(input); } @@ -113,7 +115,7 @@ public void testCustomDataSource() throws ParsingException { @Test public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - Predicate predicate1 = Expressions.makePredicate("p", 3); + Predicate predicate1 = Expressions.makePredicate("p", 1); SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "var", "?var wdt:P31 wd:Q5 ."); DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); From 6eed41dcd5713baa8aa286c31cf919b04ce5dad4 
Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:08:53 +0100 Subject: [PATCH 0696/1255] Parser: Add more test cases for invalid inputs --- .../parser/RuleParserDataSourceTest.java | 44 +++++++++++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 12 +++++ 2 files changed, 56 insertions(+) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 509d752e2..f569175f1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -90,6 +90,45 @@ public void parseDataSourceDeclaration_sparqlSourceInvalidArity_throws() RuleParser.parseDataSourceDeclaration(input); } + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceMalformedUri_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceUnknownPrefix_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"wdqs:sparql\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceUnparseableUrl_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"wdqs:\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void testSparqlSourceMalformedUrl() throws ParsingException, 
MalformedURLException { + String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_csvSourceInvalidPath_throws() throws ParsingException { + String input = "@source p[1] : load-csv(\"\0.csv\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_rdfSourceInvalidPath_throws() throws ParsingException { + String input = "@source p[3] : load-rdf(\"\0.nt\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + @Test(expected = ParsingException.class) public void testUnknownDataSource() throws ParsingException { String input = "@source p[2] : unknown-data-source(\"hello, world\") ."; @@ -144,4 +183,9 @@ public void csvDataSourceDeclarationToStringParsingTest() throws ParsingExceptio RuleParser.parseInto(kb, dataSourceDeclaration.toString()); assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } + + @Test(expected = ParsingException.class) + public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws ParsingException { + RuleParser.parseDataSourceDeclaration("@source p[1] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">) ."); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 58bab4a65..11d685491 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -254,6 +254,18 @@ public void testStringLiteralAllEscapes() throws ParsingException { assertEquals(fact, RuleParser.parseLiteral(input)); } + @Test(expected = ParsingException.class) + public void parseLiteral_invalidEscapeSequence_throws() 
throws ParsingException { + String input = "p(\"\\ÿ\")"; + RuleParser.parseLiteral(input); + } + + @Test(expected = ParsingException.class) + public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingException { + String input = "p(\"\\\")"; + RuleParser.parseLiteral(input); + } + @Test public void testStringLiteralMultiLine() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") From cbc896379ca7b784d3185dbe723e7794502149fd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:41:44 +0100 Subject: [PATCH 0697/1255] Parser: Require Entity in RuleParser#parseSyntaxFragment --- .../main/java/org/semanticweb/vlog4j/parser/RuleParser.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 50e18558f..3f0a5aa71 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -27,6 +27,7 @@ import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Entity; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -124,7 +125,7 @@ public static KnowledgeBase parse(final String input) throws ParsingException { * This is needed to specify the exceptions thrown by the parse method. 
*/ @FunctionalInterface - interface SyntaxFragmentParser { + interface SyntaxFragmentParser { T parse(final JavaCCParser parser) throws ParsingException, ParseException, PrefixDeclarationException, TokenMgrError; } @@ -140,7 +141,7 @@ T parse(final JavaCCParser parser) * @throws ParsingException when an error during parsing occurs. * @return an appropriate instance of {@code T} */ - static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, + static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); final JavaCCParser localParser = new JavaCCParser(inputStream, DEFAULT_STRING_ENCODING); From d7e1701fc788c688a8662cb77509d62a8c1a5415 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 18:44:17 +0100 Subject: [PATCH 0698/1255] Parser: Rename parserConfiguration#parseDataSourceDeclaration --- .../org/semanticweb/vlog4j/parser/ParserConfiguration.java | 4 ++-- .../java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index c1639a430..c0553fff9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -72,7 +72,7 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration } /** - * Parse a Data Source declaration. + * Parse the source-specific part of a Data Source declaration. * * This is called by the parser to construct a {@link DataSourceDeclaration}. It * is responsible for instantiating an appropriate {@link DataSource} type. 
@@ -87,7 +87,7 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration * * @return the Data Source instance. */ - public DataSource parseDataSourceDeclaration(String name, List args, + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name, List args, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler handler = dataSources.get(name); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index cd1f0c045..7fe8d5a24 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -122,7 +122,7 @@ DataSource dataSource() throws PrefixDeclarationException: (sourceName = < DIRECTIVENAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > { try { - return parserConfiguration.parseDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); + return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { throw makeParseExceptionWithCause(e); } From 0f8d5400403a54b94dc4d416ff1f2bdeccb01b35 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 19:14:17 +0100 Subject: [PATCH 0699/1255] Parser: Address various review comments --- vlog4j-parser/pom.xml | 6 +++--- .../vlog4j/parser/ParserConfiguration.java | 16 ++++++++++------ .../semanticweb/vlog4j/parser/RuleParser.java | 2 +- .../CsvFileDataSourceDeclarationHandler.java | 3 ++- .../RdfFileDataSourceDeclarationHandler.java | 3 ++- ...QueryResultDataSourceDeclarationHandler.java | 16 +++++++++------- .../vlog4j/parser/javacc/JavaCCParserBase.java | 13 ++++++++----- .../vlog4j/parser/javacc/SubParserFactory.java | 17 +++++++++-------- 
.../vlog4j/syntax/parser/RuleParserTest.java | 2 +- 9 files changed, 45 insertions(+), 33 deletions(-) diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index f05d6ef5a..ce8616874 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -47,9 +47,9 @@ - org.codehaus.mojo - build-helper-maven-plugin - 1.8 + org.codehaus.mojo + build-helper-maven-plugin + 1.8 generate-sources diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index c0553fff9..34bf895e3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -121,15 +121,19 @@ public Constant parseConstant(String lexicalForm, String languageTag, String dat if (languageTag != null) { return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); } else { - String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); - DatatypeConstantHandler handler = datatypes.get(type); + return parseDatatypeConstant(lexicalForm, datatype); + } + } - if (handler != null) { - return handler.createConstant(lexicalForm); - } + private Constant parseDatatypeConstant(String lexicalForm, String datatype) throws ParsingException { + String type = ((datatype != null) ? 
datatype : PrefixDeclarations.XSD_STRING); + DatatypeConstantHandler handler = datatypes.get(type); - return Expressions.makeDatatypeConstant(lexicalForm, type); + if (handler != null) { + return handler.createConstant(lexicalForm); } + + return Expressions.makeDatatypeConstant(lexicalForm, type); } /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 3f0a5aa71..431a01d0f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -49,7 +49,7 @@ */ public class RuleParser { - private static final String DEFAULT_STRING_ENCODING = "UTF-8"; + public static final String DEFAULT_STRING_ENCODING = "UTF-8"; private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index cf585e7e6..8b7db9640 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -41,9 +41,10 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); + File file = new File(fileName); try { - return new CsvFileDataSource(new File(fileName)); + return new CsvFileDataSource(file); } catch (IOException e) { throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 475f66d59..a17145e19 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -41,9 +41,10 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); String fileName = arguments.get(0); + File file = new File(fileName); try { - return new RdfFileDataSource(new File(fileName)); + return new RdfFileDataSource(file); } catch (IOException e) { throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index ebfacabf3..71af97bdf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -2,7 +2,7 @@ /*- * #%L - * vlog4j-parser + * VLog4j Parser * %% * Copyright (C) 2018 - 2019 VLog4j Developers * %% @@ -20,15 +20,12 @@ * #L% */ -import java.io.File; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSource; -import 
org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.ParsingException; @@ -48,12 +45,17 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); String endpoint = arguments.get(0); - URL endpointUrl; + JavaCCParser parser = subParserFactory.makeSubParser(endpoint); + String parsedEndpoint; try { - JavaCCParser parser = subParserFactory.makeSubParser(endpoint); - endpointUrl = new URL(parser.IRI(false)); + parsedEndpoint = parser.IRI(false); } catch (ParseException | PrefixDeclarationException e) { throw new ParsingException(e); + } + + URL endpointUrl; + try { + endpointUrl = new URL(parsedEndpoint); } catch (MalformedURLException e) { throw new ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 08d9ab261..7a2fbede1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -23,6 +23,7 @@ import java.util.HashSet; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Predicate; @@ -62,11 +63,11 @@ public class JavaCCParserBase { /** * "Local" variable to remember existential head variables during parsing. 
*/ - protected final HashSet headExiVars = new HashSet();; + protected final HashSet headExiVars = new HashSet(); /** * "Local" variable to remember universal head variables during parsing. */ - protected final HashSet headUniVars = new HashSet();; + protected final HashSet headUniVars = new HashSet(); /** * Defines the context for parsing sub-formulas. @@ -93,12 +94,14 @@ public JavaCCParserBase() { this.parserConfiguration = new DefaultParserConfiguration(); } - Constant createConstant(String lexicalForm) throws ParseException { + AbstractConstant createConstant(String lexicalForm) throws ParseException { + String absoluteIri; try { - return Expressions.makeAbstractConstant(prefixDeclarations.absolutize(lexicalForm)); + absoluteIri = prefixDeclarations.absolutize(lexicalForm); } catch (PrefixDeclarationException e) { throw makeParseExceptionWithCause(e); } + return Expressions.makeAbstractConstant(absoluteIri); } Constant createConstant(String lexicalForm, String datatype) throws ParseException { @@ -124,7 +127,7 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { if (dataSource.getRequiredArity().isPresent()) { Integer requiredArity = dataSource.getRequiredArity().get(); - if (requiredArity != arity) { + if (arity != requiredArity) { throw new ParseException( "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index ddea292b8..1b9deccaa 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -26,6 +26,7 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; 
import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.RuleParser; /** * Factory for creating a SubParser sharing configuration, state, and @@ -35,6 +36,10 @@ * @author Maximilian Marx */ public class SubParserFactory { + private KnowledgeBase knowledgeBase; + private ParserConfiguration parserConfiguration; + private PrefixDeclarations prefixDeclarations; + /** * Construct a SubParserFactory. * @@ -57,22 +62,18 @@ public class SubParserFactory { */ public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { JavaCCParser subParser = new JavaCCParser(inputStream, encoding); - subParser.setKnowledgeBase(knowledgeBase); - subParser.setPrefixDeclarations(prefixDeclarations); - subParser.setParserConfiguration(parserConfiguration); + subParser.setKnowledgeBase(this.knowledgeBase); + subParser.setPrefixDeclarations(this.prefixDeclarations); + subParser.setParserConfiguration(this.parserConfiguration); return subParser; } public JavaCCParser makeSubParser(final InputStream inputStream) { - return makeSubParser(inputStream, "UTF-8"); + return makeSubParser(inputStream, RuleParser.DEFAULT_STRING_ENCODING); } public JavaCCParser makeSubParser(final String string) { return makeSubParser(new ByteArrayInputStream(string.getBytes())); } - - private KnowledgeBase knowledgeBase; - private ParserConfiguration parserConfiguration; - private PrefixDeclarations prefixDeclarations; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 11d685491..29768f5b1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -367,7 +367,7 @@ public void testBlankPredicateName() throws ParsingException { 
} @Test(expected = ParsingException.class) - public void testInvalidDatatypeOnLiteral() throws ParsingException { + public void parseLiteral_invalidLiteralString_throws() throws ParsingException { final String input = "P(\"a\")^^whatever"; RuleParser.parseLiteral(input); } From f860a9cbde63a98e9fb294e3561fc127ffc3d160 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 19:44:36 +0100 Subject: [PATCH 0700/1255] Parser: Re-use constants for default data source names --- .../vlog4j/core/model/implementation/Serializer.java | 4 ++-- .../vlog4j/parser/DefaultParserConfiguration.java | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 7726b506f..75dac206a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -63,8 +63,8 @@ public final class Serializer { public static final String AT = "@"; public static final String DATA_SOURCE = "@source "; public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - private static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - private static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; + public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; public static final String DATA_SOURCE_SEPARATOR = ": "; public static final String COLON = ":"; public static final String DOUBLE_CARET = "^^"; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java index 168a738bb..a52f02b87 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -20,6 +20,7 @@ * #L% */ +import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; @@ -39,8 +40,9 @@ public DefaultParserConfiguration() { * Register built-in data sources (currently CSV, RDF, SPARQL). */ private void registerDefaultDataSources() { - registerDataSource("load-csv", new CsvFileDataSourceDeclarationHandler()); - registerDataSource("load-rdf", new RdfFileDataSourceDeclarationHandler()); - registerDataSource("sparql", new SparqlQueryResultDataSourceDeclarationHandler()); + registerDataSource(Serializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); + registerDataSource(Serializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); + registerDataSource(Serializer.SPARQL_QUERY_RESULT_DATA_SOURCE, + new SparqlQueryResultDataSourceDeclarationHandler()); } } From bb0183905f9a58ab9773b4ba52517510878bd6ff Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 19:58:19 +0100 Subject: [PATCH 0701/1255] Core: Properly escape file paths when serialising data sources --- .../vlog4j/core/model/implementation/Serializer.java | 2 +- .../vlog4j/syntax/parser/RuleParserDataSourceTest.java | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 75dac206a..164a26f97 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -322,7 +322,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource fileDataSource) { - return addQuotes(fileDataSource.getFile().toString()); + return addQuotes(escape(fileDataSource.getFile().toString())); } private static String getIRIString(final String string) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index f569175f1..5a357f407 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -188,4 +188,9 @@ public void csvDataSourceDeclarationToStringParsingTest() throws ParsingExceptio public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws ParsingException { RuleParser.parseDataSourceDeclaration("@source p[1] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">) ."); } + + @Test + public void parseDataSourceDeclaration_windowsStylePathName_success() throws ParsingException, IOException { + RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); + } } From 8d50ad95ac118144c645c59d4bea695af607408f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Dec 2019 20:20:22 +0100 Subject: [PATCH 0702/1255] Core: Handle all(*) escape sequences in Serializer for XSD_STRING (*) except for single quotes, which should be left alone, since we are serialising to double-quoted strings. Fixes #144. 
--- .../core/model/implementation/Serializer.java | 12 +++++++-- .../vlog4j/syntax/parser/RuleParserTest.java | 27 ++++++++++++++++--- 2 files changed, 34 insertions(+), 5 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 164a26f97..ad91d9aa8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -191,7 +191,7 @@ public static String getConstantName(final LanguageStringConstant languageString */ public static String getString(final DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return addQuotes(datatypeConstant.getLexicalValue()); + return addQuotes(escape(datatypeConstant.getLexicalValue())); } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -335,7 +335,15 @@ private static String getIRIString(final String string) { } private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\""); + return string + .replace("\\", "\\\\") + .replace("\"", "\\\"") + .replace("\t", "\\t") + .replace("\b", "\\b") + .replace("\n", "\\n") + .replace("\r", "\\r") + .replace("\f", "\\f"); + // don't touch single quotes here since we only construct double-quoted strings } private static String addQuotes(final String string) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 29768f5b1..d5bcf7753 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -238,7 +238,7 @@ public void testIncompleteStringLiteral() throws ParsingException { } @Test - public void testStringLiteralEscapes() throws ParsingException { + public void parseLiteral_escapeSequences_success() throws ParsingException { String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); @@ -246,7 +246,14 @@ public void testStringLiteralEscapes() throws ParsingException { } @Test - public void testStringLiteralAllEscapes() throws ParsingException { + public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + } + + @Test + public void parseLiteral_allEscapeSequences_success() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", @@ -254,6 +261,13 @@ public void testStringLiteralAllEscapes() throws ParsingException { assertEquals(fact, RuleParser.parseLiteral(input)); } + @Test + public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + } + @Test(expected = ParsingException.class) public void parseLiteral_invalidEscapeSequence_throws() throws ParsingException { String input = "p(\"\\ÿ\")"; @@ -267,13 +281,20 @@ public void 
parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingE } @Test - public void testStringLiteralMultiLine() throws ParsingException { + public void parseLiteral_multiLineLiteral_success() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } + @Test + public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { + PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + } + @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { String input = "p('''abc\ndef'')"; From c4823ab0dc73168b8026ecd08f3fea8c6e88fb71 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 6 Dec 2019 14:42:54 +0100 Subject: [PATCH 0703/1255] Parser: Address review comments --- .../implementation/SparqlQueryResultDataSource.java | 1 + .../vlog4j/parser/DataSourceDeclarationHandler.java | 8 ++++---- .../semanticweb/vlog4j/parser/ParserConfiguration.java | 4 ++-- .../SparqlQueryResultDataSourceDeclarationHandler.java | 2 +- .../org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 2 +- .../vlog4j/parser/javacc/JavaCCParserBase.java | 8 ++------ .../semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 2 +- 7 files changed, 12 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java index cadeeae79..89db26939 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -136,6 +136,7 @@ static String getQueryVariablesList(LinkedHashSet queryVariables) { return sb.toString(); } + @Override public Optional getRequiredArity() { return Optional.of(this.queryVariables.split(",").length); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index ff2907ca9..19ef07ba2 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -35,15 +35,15 @@ public interface DataSourceDeclarationHandler { /** * Parse a Data Source Declaration. * - * This is called by the parser to instantiate the {@link DataSource} - * component of a {@link DataSourceDeclaration}. + * This is called by the parser to instantiate the {@link DataSource} component + * of a {@link DataSourceDeclaration}. * * @param arguments Arguments given to the Data Source declaration. * @param subParserFactory a factory for obtaining a SubParser, sharing the * parser's state, but bound to new input. * - * @throws ParsingException when the given arity or arguments are invalid for - * the Data Source. + * @throws ParsingException when any of the arguments is invalid for the Data + * Source, or the number of arguments is invalid. * @return a {@link DataSource} instance corresponding to the given arguments. 
*/ public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 34bf895e3..a3b16dcba 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -109,8 +109,8 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name * * @throws ParsingException when the lexical form is invalid for the * given data type. - * @throws IllegalArgumentException when neither {@code languageTag} and - * {@code datatype} are null. + * @throws IllegalArgumentException when both {@code languageTag} and + * {@code datatype} are non-null. * @return the {@link Constant} corresponding to the given arguments. */ public Constant parseConstant(String lexicalForm, String languageTag, String datatype) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 71af97bdf..3524fcb0f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -50,7 +50,7 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto try { parsedEndpoint = parser.IRI(false); } catch (ParseException | PrefixDeclarationException e) { - throw new ParsingException(e); + throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); } URL endpointUrl; diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 7fe8d5a24..2a127ba9b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -124,7 +124,7 @@ DataSource dataSource() throws PrefixDeclarationException: try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { - throw makeParseExceptionWithCause(e); + throw makeParseExceptionWithCause("Failed while trying to parse the source-specific part of a data source declaration", e); } } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7a2fbede1..63173e270 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -99,7 +99,7 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { try { absoluteIri = prefixDeclarations.absolutize(lexicalForm); } catch (PrefixDeclarationException e) { - throw makeParseExceptionWithCause(e); + throw makeParseExceptionWithCause("Failed to parse IRI", e); } return Expressions.makeAbstractConstant(absoluteIri); } @@ -120,7 +120,7 @@ Constant createConstant(String lexicalForm, String languageTag, String datatype) try { return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); } catch (ParsingException e) { - throw makeParseExceptionWithCause(e); + throw makeParseExceptionWithCause("Failed to parse Constant", e); } } @@ -249,10 +249,6 @@ protected ParseException makeParseExceptionWithCause(String message, Throwable c return 
parseException; } - protected ParseException makeParseExceptionWithCause(Throwable cause) { - return makeParseExceptionWithCause(cause.getMessage(), cause); - } - public void setKnowledgeBase(KnowledgeBase knowledgeBase) { this.knowledgeBase = knowledgeBase; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index d5bcf7753..971202610 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -411,7 +411,7 @@ public void testIriTypeInDatatypeLiteral() throws ParsingException { @Test public void predicateRelativeNumericIRITest() throws ParsingException { AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); + Fact f = RuleParser.parseFact("<1.e1>(a)."); // 1.e1 == "10"^^xsd:double Fact f2 = Expressions.makeFact("1.e1", a); assertEquals(f, f2); } From b846f1d709a43305406218c1fc78d6152f0b8ba4 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 16:30:30 +0100 Subject: [PATCH 0704/1255] update to release version 0.5.0 --- coverage/pom.xml | 14 +++++++------- pom.xml | 2 +- vlog4j-client/pom.xml | 2 +- vlog4j-core/pom.xml | 2 +- vlog4j-examples/pom.xml | 2 +- vlog4j-graal/pom.xml | 2 +- vlog4j-owlapi/pom.xml | 2 +- vlog4j-parser/pom.xml | 2 +- vlog4j-rdf/pom.xml | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index e158f1d91..0f2270cb3 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 coverage @@ -16,32 +16,32 @@ org.semanticweb.vlog4j vlog4j-core - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-rdf - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-owlapi - 0.5.0-SNAPSHOT + 0.5.0 
org.semanticweb.vlog4j vlog4j-graal - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-parser - 0.5.0-SNAPSHOT + 0.5.0 org.semanticweb.vlog4j vlog4j-client - 0.5.0-SNAPSHOT + 0.5.0 diff --git a/pom.xml b/pom.xml index 8e831b96f..54f123629 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 pom VLog4j diff --git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index f6f9d5169..7bfe4fc43 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-client diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index a1c646e97..b3c3183c3 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-core diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index 568c6e905..aa3132d08 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-examples diff --git a/vlog4j-graal/pom.xml b/vlog4j-graal/pom.xml index 0b922bcc5..a5b6724e3 100644 --- a/vlog4j-graal/pom.xml +++ b/vlog4j-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-graal diff --git a/vlog4j-owlapi/pom.xml b/vlog4j-owlapi/pom.xml index c4e79c45f..2bb0a6834 100644 --- a/vlog4j-owlapi/pom.xml +++ b/vlog4j-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-owlapi diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index ce8616874..14bce0278 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0-SNAPSHOT + 0.5.0 vlog4j-parser diff --git a/vlog4j-rdf/pom.xml b/vlog4j-rdf/pom.xml index 1d69e2284..cd75da4e2 100644 --- a/vlog4j-rdf/pom.xml +++ b/vlog4j-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 
0.5.0-SNAPSHOT + 0.5.0 vlog4j-rdf From 89137b8ed83113eed0763f520abed40d1a5b01f1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 6 Dec 2019 16:31:41 +0100 Subject: [PATCH 0705/1255] update Readme to new release version 5.0.5 --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e11ad17f7..2051a94ae 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of VLog4j is version 0.4.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of VLog4j is version 0.5.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` @@ -50,4 +50,4 @@ Development * The master branch may require a development version of VLog. Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. -* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. \ No newline at end of file +* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. 
From 0b311016129c06fdd9450b0de65891bcdbefebc4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 6 Dec 2019 16:32:21 +0100 Subject: [PATCH 0706/1255] update README to new release version 0.5.0 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2051a94ae..a40a9512e 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ The current release of VLog4j is version 0.5.0. The easiest way of using the lib org.semanticweb.vlog4j vlog4j-core - 0.4.0 + 0.5.0 ``` From 36afeacf15e1dd9c6264649359d8055ad027b917 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 6 Dec 2019 16:36:16 +0100 Subject: [PATCH 0707/1255] update README to link to our syntax in wiki --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a40a9512e..dbb5a647b 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ The current release of VLog4j is version 0.5.0. The easiest way of using the lib You need to use Java 1.8 or above. 
Available modules include: * **vlog4j-core**: essential data models for rules and facts, and essential reasoner functionality -* **vlog4j-parser**: support for processing knowledge bases in VLog4j syntax +* **vlog4j-parser**: support for processing knowledge bases in [VLog4j syntax](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar) * **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files * **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API @@ -41,7 +41,7 @@ Documentation * The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases * The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language, and related publications. +* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language [examples](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar), and related publications. 
Development ----------- From 55ca9fed74f28fdb5505b6efe5d8dd507e863e15 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 18:44:57 +0100 Subject: [PATCH 0708/1255] fixed some javadoc errors --- .../vlog4j/client/picocli/ClientUtils.java | 2 + .../client/picocli/PrintQueryResults.java | 6 +- .../vlog4j/client/picocli/SaveModel.java | 8 +- .../client/picocli/SaveQueryResults.java | 8 +- .../vlog4j/core/model/api/Variable.java | 7 +- .../core/model/implementation/FactImpl.java | 13 +-- .../core/model/implementation/Serializer.java | 42 ++++----- .../vlog4j/core/reasoner/KnowledgeBase.java | 86 +++++++++---------- .../vlog4j/core/reasoner/Reasoner.java | 74 ++++++++-------- .../vlog4j/core/reasoner/ReasonerState.java | 12 ++- .../implementation/InMemoryDataSource.java | 35 ++++---- .../vlog4j/examples/ExamplesUtils.java | 8 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 2 +- .../parser/DataSourceDeclarationHandler.java | 5 +- 14 files changed, 152 insertions(+), 156 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java index 35b10b2b0..c0f81099c 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java @@ -96,6 +96,8 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * * @param queryAtom query to be answered * @param reasoner reasoner to query on + * + * @return number of answers to the given query */ public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java 
b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java index b9d656f3d..1fb824007 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java @@ -59,10 +59,10 @@ public PrintQueryResults(final boolean sizeOnly, final boolean complete) { } /** - * Check correct configuration of the class. @code{--print-query-result-size} - * and @code{--print-query-result} are mutually exclusive. + * Check correct configuration of the class. {@code --print-query-result-size} + * and {@code --print-query-result} are mutually exclusive. * - * @return @code{true} if configuration is valid. + * @return {@code true} if configuration is valid. */ public boolean isValid() { return !this.sizeOnly || !this.complete; diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java index 12be0e9d0..5d8fd08a7 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java @@ -63,10 +63,10 @@ public SaveModel(final boolean saveModel, final String outputDir) { } /** - * Check correct configuration of the class. If @code{--save-model} is true, - * then a non-empty @code{--output-model-directory} is required. + * Check correct configuration of the class. If {@code --save-model} is true, + * then a non-empty {@code --output-model-directory} is required. * - * @return @code{true} if configuration is valid. + * @return {@code true} if configuration is valid. 
*/ public boolean isConfigurationValid() { return !this.saveModel || ((this.outputModelDirectory != null) && !this.outputModelDirectory.isEmpty()); @@ -75,7 +75,7 @@ public boolean isConfigurationValid() { /** * Check that the path to store the model is either non-existing or a directory. * - * @return @code{true} if conditions are satisfied. + * @return {@code true} if conditions are satisfied. */ public boolean isDirectoryValid() { final File file = new File(this.outputModelDirectory); diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java index ce0345bd0..b694cb532 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java @@ -64,10 +64,10 @@ public SaveQueryResults(final boolean saveResults, final String outputDir) { } /** - * Check correct configuration of the class. If @code{--save-query-results} is - * true, then a non-empty @code{--output-query-result-directory} is required. + * Check correct configuration of the class. If {@code --save-query-results} is + * true, then a non-empty {@code --output-query-result-directory} is required. * - * @return @code{true} if configuration is valid. + * @return {@code true} if configuration is valid. */ public boolean isConfigurationValid() { return !this.saveResults @@ -78,7 +78,7 @@ public boolean isConfigurationValid() { * Check that the path to store the query results is either non-existing or a * directory. * - * @return @code{true} if conditions are satisfied. + * @return {@code true} if conditions are satisfied. 
*/ public boolean isDirectoryValid() { final File file = new File(this.outputQueryResultDirectory); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java index ba6a80e16..63b642a93 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java @@ -21,9 +21,10 @@ */ /** - * Interface for variables, i.e., terms of type {@link TermType#VARIABLE}. - * Variables are terms that can be quantified to create formulas that refer to - * some or all values of the domain. + * Interface for variables, i.e., terms of type + * {@link TermType#UNIVERSAL_VARIABLE} and + * {@link TermType#EXISTENTIAL_VARIABLE}. Variables are terms that can be + * quantified to create formulas that refer to some or all values of the domain. * * @author david.carral@tu-dresden.de * @author Markus Krötzsch diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java index d22794133..e1712dd37 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java @@ -28,29 +28,30 @@ import org.semanticweb.vlog4j.core.model.api.Term; /** - * Standard implementation of the {@Fact} interface. + * Standard implementation of the {@link Fact} interface. 
* * @author Markus Kroetzsch * */ public class FactImpl extends PositiveLiteralImpl implements Fact { - public FactImpl(Predicate predicate, List terms) { + public FactImpl(final Predicate predicate, final List terms) { super(predicate, terms); - for (Term t : terms) { - if (t.isVariable()) + for (final Term t : terms) { + if (t.isVariable()) { throw new IllegalArgumentException("Facts cannot contain variables."); + } } } @Override - public T accept(StatementVisitor statementVisitor) { + public T accept(final StatementVisitor statementVisitor) { return statementVisitor.visit(this); } @Override public String toString() { - return getSyntacticRepresentation(); + return this.getSyntacticRepresentation(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ad91d9aa8..b3c9fa9f7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -88,7 +88,7 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -100,7 +100,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -121,7 +121,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. 
* - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ @@ -148,7 +148,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ @@ -159,7 +159,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link Constant}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param constant a {@link Constant} * @return String representation corresponding to a given {@link Constant}. */ @@ -171,7 +171,7 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -184,7 +184,7 @@ public static String getConstantName(final LanguageStringConstant languageString * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} without an IRI. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. 
@@ -207,7 +207,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -220,7 +220,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -232,7 +232,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -244,7 +244,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -255,7 +255,7 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . 
* @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -266,7 +266,7 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. * - * @see <"https://github.com/knowsys/vlog4j/wiki"> for wiki. + * @see Rule syntax . * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. @@ -279,7 +279,7 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki">. + * @see Rule syntax .. * * @param csvFileDataSource * @return String representation corresponding to a given @@ -292,7 +292,7 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki">. + * @see Rule syntax .. * * * @param rdfFileDataSource @@ -307,7 +307,7 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see <"https://github.com/knowsys/vlog4j/wiki">. + * @see Rule syntax . 
* * * @param dataSource @@ -335,14 +335,8 @@ private static String getIRIString(final String string) { } private static String escape(final String string) { - return string - .replace("\\", "\\\\") - .replace("\"", "\\\"") - .replace("\t", "\\t") - .replace("\b", "\\b") - .replace("\n", "\\n") - .replace("\r", "\\r") - .replace("\f", "\\f"); + return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 8e366147d..54c4a256f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -63,18 +63,18 @@ public class KnowledgeBase implements Iterable { */ private class AddStatementVisitor implements StatementVisitor { @Override - public Boolean visit(Fact statement) { - addFact(statement); + public Boolean visit(final Fact statement) { + KnowledgeBase.this.addFact(statement); return true; } @Override - public Boolean visit(Rule statement) { + public Boolean visit(final Rule statement) { return true; } @Override - public Boolean visit(DataSourceDeclaration statement) { + public Boolean visit(final DataSourceDeclaration statement) { KnowledgeBase.this.dataSourceDeclarations.add(statement); return true; } @@ -92,18 +92,18 @@ public Boolean visit(DataSourceDeclaration statement) { private class RemoveStatementVisitor implements StatementVisitor { @Override - public Boolean visit(Fact statement) { - removeFact(statement); + public Boolean visit(final Fact statement) { + KnowledgeBase.this.removeFact(statement); return true; } @Override - public Boolean visit(Rule statement) { + public 
Boolean visit(final Rule statement) { return true; } @Override - public Boolean visit(DataSourceDeclaration statement) { + public Boolean visit(final DataSourceDeclaration statement) { KnowledgeBase.this.dataSourceDeclarations.remove(statement); return true; } @@ -116,7 +116,7 @@ private class ExtractStatementsVisitor implements StatementVisitor { final ArrayList extracted = new ArrayList<>(); final Class ownType; - ExtractStatementsVisitor(Class type) { + ExtractStatementsVisitor(final Class type) { this.ownType = type; } @@ -126,7 +126,7 @@ ArrayList getExtractedStatements() { @SuppressWarnings("unchecked") @Override - public Void visit(Fact statement) { + public Void visit(final Fact statement) { if (this.ownType.equals(Fact.class)) { this.extracted.add((T) statement); } @@ -135,7 +135,7 @@ public Void visit(Fact statement) { @SuppressWarnings("unchecked") @Override - public Void visit(Rule statement) { + public Void visit(final Rule statement) { if (this.ownType.equals(Rule.class)) { this.extracted.add((T) statement); } @@ -144,7 +144,7 @@ public Void visit(Rule statement) { @SuppressWarnings("unchecked") @Override - public Void visit(DataSourceDeclaration statement) { + public Void visit(final DataSourceDeclaration statement) { if (this.ownType.equals(DataSourceDeclaration.class)) { this.extracted.add((T) statement); } @@ -181,7 +181,7 @@ public Void visit(DataSourceDeclaration statement) { * * @param listener */ - public void addListener(KnowledgeBaseListener listener) { + public void addListener(final KnowledgeBaseListener listener) { this.listeners.add(listener); } @@ -190,7 +190,7 @@ public void addListener(KnowledgeBaseListener listener) { * * @param listener */ - public void deleteListener(KnowledgeBaseListener listener) { + public void deleteListener(final KnowledgeBaseListener listener) { this.listeners.remove(listener); } @@ -199,11 +199,10 @@ public void deleteListener(KnowledgeBaseListener listener) { * Adds a single statement to the knowledge base. 
* * @param statement the statement to be added - * @return true, if the knowledge base has changed. */ - public void addStatement(Statement statement) { - if (doAddStatement(statement)) { - notifyListenersOnStatementAdded(statement); + public void addStatement(final Statement statement) { + if (this.doAddStatement(statement)) { + this.notifyListenersOnStatementAdded(statement); } } @@ -213,7 +212,7 @@ public void addStatement(Statement statement) { * @param statement the statement to be added * @return true, if the knowledge base has changed. */ - boolean doAddStatement(Statement statement) { + boolean doAddStatement(final Statement statement) { Validate.notNull(statement, "Statement cannot be Null!"); if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { this.statements.add(statement); @@ -227,16 +226,16 @@ boolean doAddStatement(Statement statement) { * * @param statements the statements to be added */ - public void addStatements(Collection statements) { + public void addStatements(final Collection statements) { final List addedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doAddStatement(statement)) { + if (this.doAddStatement(statement)) { addedStatements.add(statement); } } - notifyListenersOnStatementsAdded(addedStatements); + this.notifyListenersOnStatementsAdded(addedStatements); } /** @@ -244,27 +243,26 @@ public void addStatements(Collection statements) { * * @param statements the statements to be added */ - public void addStatements(Statement... statements) { + public void addStatements(final Statement... 
statements) { final List addedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doAddStatement(statement)) { + if (this.doAddStatement(statement)) { addedStatements.add(statement); } } - notifyListenersOnStatementsAdded(addedStatements); + this.notifyListenersOnStatementsAdded(addedStatements); } /** * Removes a single statement from the knowledge base. * - * @return true, if the knowledge base has changed. * @param statement the statement to remove */ - public void removeStatement(Statement statement) { - if (doRemoveStatement(statement)) { - notifyListenersOnStatementRemoved(statement); + public void removeStatement(final Statement statement) { + if (this.doRemoveStatement(statement)) { + this.notifyListenersOnStatementRemoved(statement); } } @@ -274,7 +272,7 @@ public void removeStatement(Statement statement) { * @param statement the statement to remove * @return true, if the knowledge base has changed. */ - boolean doRemoveStatement(Statement statement) { + boolean doRemoveStatement(final Statement statement) { Validate.notNull(statement, "Statement cannot be Null!"); if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { @@ -289,16 +287,16 @@ boolean doRemoveStatement(Statement statement) { * * @param statements the statements to remove */ - public void removeStatements(Collection statements) { + public void removeStatements(final Collection statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doRemoveStatement(statement)) { + if (this.doRemoveStatement(statement)) { removedStatements.add(statement); } } - notifyListenersOnStatementsRemoved(removedStatements); + this.notifyListenersOnStatementsRemoved(removedStatements); } /** @@ -306,16 +304,16 @@ public void removeStatements(Collection statements) { * * @param statements the statements to remove */ - public void removeStatements(Statement... 
statements) { + public void removeStatements(final Statement... statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { - if (doRemoveStatement(statement)) { + if (this.doRemoveStatement(statement)) { removedStatements.add(statement); } } - notifyListenersOnStatementsRemoved(removedStatements); + this.notifyListenersOnStatementsRemoved(removedStatements); } private void notifyListenersOnStatementAdded(final Statement addedStatement) { @@ -332,13 +330,13 @@ private void notifyListenersOnStatementsAdded(final List addedStateme } } - private void notifyListenersOnStatementRemoved(Statement removedStatement) { + private void notifyListenersOnStatementRemoved(final Statement removedStatement) { for (final KnowledgeBaseListener listener : this.listeners) { listener.onStatementRemoved(removedStatement); } } - private void notifyListenersOnStatementsRemoved(List removedStatements) { + private void notifyListenersOnStatementsRemoved(final List removedStatements) { if (!removedStatements.isEmpty()) { for (final KnowledgeBaseListener listener : this.listeners) { listener.onStatementsRemoved(removedStatements); @@ -353,7 +351,7 @@ private void notifyListenersOnStatementsRemoved(List removedStatement * @return list of {@link Rule}s */ public List getRules() { - return getStatementsByType(Rule.class); + return this.getStatementsByType(Rule.class); } /** @@ -363,7 +361,7 @@ public List getRules() { * @return list of {@link Fact}s */ public List getFacts() { - return getStatementsByType(Fact.class); + return this.getStatementsByType(Fact.class); } /** @@ -374,10 +372,10 @@ public List getFacts() { * @return list of {@link DataSourceDeclaration}s */ public List getDataSourceDeclarations() { - return getStatementsByType(DataSourceDeclaration.class); + return this.getStatementsByType(DataSourceDeclaration.class); } - List getStatementsByType(Class type) { + List getStatementsByType(final Class type) { final 
ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); for (final Statement statement : this.statements) { statement.accept(visitor); @@ -391,7 +389,7 @@ List getStatementsByType(Class type) { * * @param fact the fact to add */ - void addFact(Fact fact) { + void addFact(final Fact fact) { final Predicate predicate = fact.getPredicate(); this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); this.factsByPredicate.get(predicate).add(fact); @@ -403,7 +401,7 @@ void addFact(Fact fact) { * * @param fact the fact to remove */ - void removeFact(Fact fact) { + void removeFact(final Fact fact) { final Predicate predicate = fact.getPredicate(); final Set facts = this.factsByPredicate.get(predicate); facts.remove(fact); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index f391bbe80..1da67d694 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -2,16 +2,17 @@ import java.io.IOException; +import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.TermType; +import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import karmaresearch.vlog.Atom; - /* * #%L * VLog4j Core Components @@ -50,17 +51,17 @@ * * The loaded reasoner can perform atomic queries on explicit and * implicit facts after 
calling {@link Reasoner#reason()}. Queries can provide - * an iterator for the results ({@link #answerQuery(Atom, boolean)}, or the - * results can be exported to a file - * ({@link #exportQueryAnswersToCsv(Atom, String, boolean)}).
+ * an iterator for the results ({@link #answerQuery(PositiveLiteral, boolean)}, + * or the results can be exported to a file + * ({@link #exportQueryAnswersToCsv(PositiveLiteral, String, boolean)}).
*
* Reasoning with various {@link Algorithm}s is supported, that can lead * to different sets of inferred facts and different termination behavior. In * some cases, reasoning with rules with existentially quantified variables - * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We - * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it - * leads to termination in more cases. To avoid non-termination, a reasoning - * timeout can be set ({@link Reasoner#setReasoningTimeout(Integer)}).
+ * ({@link ExistentialVariable}) may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases. To avoid non-termination, a reasoning timeout can be set + * ({@link Reasoner#setReasoningTimeout(Integer)}).
* * @author Irina Dragoste * @@ -73,7 +74,7 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { * * @return a {@link VLogReasoner} instance. */ - public static Reasoner getInstance() { + static Reasoner getInstance() { final KnowledgeBase knowledgeBase = new KnowledgeBase(); return new VLogReasoner(knowledgeBase); } @@ -104,9 +105,9 @@ public static Reasoner getInstance() { /** * In some cases, reasoning with rules with existentially quantified variables - * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We - * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it - * leads to termination in more cases.
+ * ({@link ExistentialVariable}) may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases.
* This method sets a timeout (in seconds) after which reasoning can be * artificially interrupted if it has not reached completion. * @@ -178,7 +179,7 @@ public static Reasoner getInstance() { * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic, * Cyclic, or cyclicity cannot be determined. * - * @return + * @return the appropriate CyclicityResult. */ CyclicityResult checkForCycles(); @@ -259,9 +260,9 @@ public static Reasoner getInstance() { * knowledge base rules.
*
* In some cases, reasoning with rules with existentially quantified variables - * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We - * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it - * leads to termination in more cases.
+ * {@link ExistentialVariable} may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases.
* To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
* @@ -281,11 +282,11 @@ public static Reasoner getInstance() { * the reasoner and the explicit facts materialised by the reasoner.
* An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
+ * {@link Constant} terms of the {@code query} appear in the answer fact at the + * same term position, and the {@link Variable} terms of the {@code query} are + * matched by terms in the fact, either named ({@link Constant}) or anonymous + * ({@link NamedNull}). The same variable name identifies the same term in the + * answer fact.
* A query answer is represented by a {@link QueryResult}. A query can have * multiple, distinct query answers. This method returns an Iterator over these * answers.
@@ -319,12 +320,12 @@ public static Reasoner getInstance() { * @param query a {@link PositiveLiteral} representing the query to be * answered. * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of - * type {@link TermType#NAMED_NULL} (representing anonymous + * type {@link NamedNull} (representing anonymous * individuals introduced to satisfy rule existentially * quantified variables) will be included. Otherwise, the * answers will only contain the {@link QueryResult}s with - * terms of type {@link TermType#CONSTANT} (representing - * named individuals). + * terms of type {@link Constant} (representing named + * individuals). * @return QueryResultIterator that iterates over distinct answers to the query. * It also contains the {@link Correctness} of the query answers. */ @@ -338,11 +339,11 @@ public static Reasoner getInstance() { *
* An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
+ * {@link Constant} terms of the {@code query} appear in the answer fact at the + * same term position, and the {@link Variable} terms of the {@code query} are + * matched by terms in the fact, either named ({@link Constant}) or anonymous + * ({@link NamedNull}). The same variable name identifies the same term in the + * answer fact.
* A query can have multiple, distinct query answers. Each answers is written on * a separate line in the given file. * @@ -353,12 +354,11 @@ public static Reasoner getInstance() { * represents a query answer, and it will contain the fact * term names as columns. * @param includeNulls if {@code true}, answers containing terms of type - * {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain those with terms of type - * {@link TermType#CONSTANT} (representing named - * individuals). + * {@link NamedNull} (representing anonymous individuals + * introduced to satisfy rule existentially quantified + * variables) will be included. Otherwise, the answers will + * only contain those with terms of type {@link Constant} + * (representing named individuals). * * @throws IOException if an I/O error occurs regarding given file * ({@code csvFilePath)}. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java index 4fb79e735..5a30d7359 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java @@ -29,14 +29,12 @@ */ public enum ReasonerState { /** - * State a Reasoner is in before method {@link Reasoner#load()} has been called. - * Querying is not allowed in this state. + * State a Reasoner is in before loading. Querying is not allowed in this state. */ KB_NOT_LOADED("knowledge base not loaded"), /** - * State a Reasoner is in after method {@link Reasoner#load()} has been called, - * and before method {@link Reasoner#reason()} has been called. The Reasoner can - * be queried. + * State a Reasoner is in after loading, and before method + * {@link Reasoner#reason()} has been called. 
The Reasoner can be queried. */ KB_LOADED("knowledge base loaded"), @@ -65,13 +63,13 @@ public enum ReasonerState { private final String name; - private ReasonerState(String name) { + private ReasonerState(final String name) { this.name = name; } @Override public String toString() { - return name; + return this.name; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index e498cacf2..6a290f021 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -23,10 +23,11 @@ import java.util.Arrays; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.Fact; /** * A {@link DataSource} for representing a large number of facts that were - * generated in Java. Rather than making {@link Fact} objects for all of them, + * generated in Java. Rather than creating {@link Fact} objects for all of them, * the object will directly accept tuples of constant names that are internally * stored in a form that can be passed to the reasoner directly, thereby saving * memory and loading time. 
@@ -50,10 +51,10 @@ public class InMemoryDataSource implements DataSource { * @param arity the number of parameters in a fact from this source * @param initialCapacity the planned number of facts */ - public InMemoryDataSource(int arity, int initialCapacity) { + public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; this.arity = arity; - data = new String[initialCapacity][arity]; + this.data = new String[initialCapacity][arity]; } /** @@ -62,20 +63,20 @@ public InMemoryDataSource(int arity, int initialCapacity) { * * @param constantNames the string names of the constants in this fact */ - public void addTuple(String... constantNames) { - if (constantNames.length != arity) { - throw new IllegalArgumentException("This data source holds tuples of arity " + arity + public void addTuple(final String... constantNames) { + if (constantNames.length != this.arity) { + throw new IllegalArgumentException("This data source holds tuples of arity " + this.arity + ". Adding a tuple of size " + constantNames.length + " is not possible."); } - if (nextEmptyTuple == capacity) { - capacity = capacity * 2; - this.data = Arrays.copyOf(data, capacity); + if (this.nextEmptyTuple == this.capacity) { + this.capacity = this.capacity * 2; + this.data = Arrays.copyOf(this.data, this.capacity); } - data[nextEmptyTuple] = new String[arity]; - for (int i = 0; i < arity; i++) { - data[nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); + this.data[this.nextEmptyTuple] = new String[this.arity]; + for (int i = 0; i < this.arity; i++) { + this.data[this.nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); } - nextEmptyTuple++; + this.nextEmptyTuple++; } /** @@ -85,7 +86,7 @@ public void addTuple(String... 
constantNames) { * @return the data */ public String[][] getData() { - if (nextEmptyTuple == capacity) { + if (this.nextEmptyTuple == this.capacity) { return this.data; } else { return Arrays.copyOf(this.data, this.nextEmptyTuple); @@ -94,11 +95,11 @@ public String[][] getData() { @Override public String getSyntacticRepresentation() { - StringBuilder sb = new StringBuilder( + final StringBuilder sb = new StringBuilder( "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < this.getData().length; i++) { - for (int j = 0; j < data[i].length; j++) { - sb.append(data[i][j] + " "); + for (int j = 0; j < this.data[i].length; j++) { + sb.append(this.data[i][j] + " "); } sb.append("\n"); } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index a21ef2999..a75b5a35a 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -100,8 +100,8 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * Prints out the answers given by {@code reasoner} to the query * ({@code queryAtom}). * - * @param queryAtom query to be answered - * @param reasoner reasoner to query on + * @param queryString query to be answered + * @param reasoner reasoner to query on */ public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) { try { @@ -129,8 +129,8 @@ public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Rea * Returns the number of answers returned by {@code reasoner} to the query * ({@code queryAtom}). 
* - * @param queryAtom query to be answered - * @param reasoner reasoner to query on + * @param queryString query to be answered + * @param reasoner reasoner to query on */ public static int getQueryAnswerCount(final String queryString, final Reasoner reasoner) { try { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java index 055989044..f738449f4 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -45,7 +45,7 @@ /** * This example shows how vlog4j-owlapi library (class * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into - * vlog4j-core {@link Rule}s and {@link Atom}s. + * vlog4j-core {@link Rule}s and {@link Fact}s. * * @author Irina Dragoste * diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 19ef07ba2..180b93053 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -23,6 +23,7 @@ import java.util.List; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -46,7 +47,7 @@ public interface DataSourceDeclarationHandler { * Source, or the number of arguments is invalid. * @return a {@link DataSource} instance corresponding to the given arguments. 
*/ - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -58,7 +59,7 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto * @throws ParsingException when the given number of Arguments is invalid for * the Data Source. */ - static void validateNumberOfArguments(List arguments, int number) throws ParsingException { + static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException("Invalid number of arguments " + arguments.size() + " for Data Source declaration, expected " + number); From 6741fe8c4e16cc7ec2b8b230073dfde7e0c8e94b Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 19:21:01 +0100 Subject: [PATCH 0709/1255] fixed some javadoc errors --- .../graal/GraalToVLog4JModelConverter.java | 8 ++--- .../vlog4j/parser/ParserConfiguration.java | 36 +++++++++---------- .../parser/javacc/SubParserFactory.java | 33 ++++++++--------- 3 files changed, 39 insertions(+), 38 deletions(-) diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java index bec53d7fb..fe44ed0b9 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java @@ -93,8 +93,8 @@ public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom * Atoms} into a {@link List} of {@link PositiveLiteral VLog4J * PositiveLiterals}. * - * @param literals A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Atom - * Graal Atoms}. + * @param literals list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms}. 
* @return A {@link List} of {@link PositiveLiteral VLog4J PositiveLiterals}. */ public static List convertAtoms(final List atoms) { @@ -109,8 +109,8 @@ public static List convertAtoms(final List convertAtomsToFacts(final List atoms) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index a3b16dcba..770c7fd16 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -26,6 +26,7 @@ import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -39,12 +40,12 @@ public class ParserConfiguration { /** * The registered data sources. */ - private HashMap dataSources = new HashMap<>(); + private final HashMap dataSources = new HashMap<>(); /** * The registered datatypes. */ - private HashMap datatypes = new HashMap<>(); + private final HashMap datatypes = new HashMap<>(); /** * Register a new (type of) Data Source. @@ -53,8 +54,8 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see <"https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar"> for the - * grammar. + * @see + * the grammar. * * @param name Name of the data source, as it appears in the declaring * directive. @@ -63,9 +64,9 @@ public class ParserConfiguration { * @throws IllegalArgumentException if the provided name is already registered. 
* @return this */ - public ParserConfiguration registerDataSource(String name, DataSourceDeclarationHandler handler) + public ParserConfiguration registerDataSource(final String name, final DataSourceDeclarationHandler handler) throws IllegalArgumentException { - Validate.isTrue(!dataSources.containsKey(name), "The Data Source \"%s\" is already registered.", name); + Validate.isTrue(!this.dataSources.containsKey(name), "The Data Source \"%s\" is already registered.", name); this.dataSources.put(name, handler); return this; @@ -87,9 +88,9 @@ public ParserConfiguration registerDataSource(String name, DataSourceDeclaration * * @return the Data Source instance. */ - public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name, List args, + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, final List args, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler handler = dataSources.get(name); + final DataSourceDeclarationHandler handler = this.dataSources.get(name); if (handler == null) { throw new ParsingException("Data source \"" + name + "\" is not known."); @@ -104,8 +105,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name * @param lexicalForm the (unescaped) lexical form of the constant. * @param languageTag the language tag, or null if not present. * @param the datatype, or null if not present. - * @note At most one of {@code languageTag} and {@code datatype} may be - * non-null. + * @pre At most one of {@code languageTag} and {@code datatype} may be non-null. * * @throws ParsingException when the lexical form is invalid for the * given data type. @@ -113,21 +113,21 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String name * {@code datatype} are non-null. * @return the {@link Constant} corresponding to the given arguments. 
*/ - public Constant parseConstant(String lexicalForm, String languageTag, String datatype) + public Constant parseConstant(final String lexicalForm, final String languageTag, final String datatype) throws ParsingException, IllegalArgumentException { - Validate.isTrue(languageTag == null || datatype == null, + Validate.isTrue((languageTag == null) || (datatype == null), "A constant with a language tag may not explicitly specify a data type."); if (languageTag != null) { return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); } else { - return parseDatatypeConstant(lexicalForm, datatype); + return this.parseDatatypeConstant(lexicalForm, datatype); } } - private Constant parseDatatypeConstant(String lexicalForm, String datatype) throws ParsingException { - String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); - DatatypeConstantHandler handler = datatypes.get(type); + private Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { + final String type = ((datatype != null) ? 
datatype : PrefixDeclarations.XSD_STRING); + final DatatypeConstantHandler handler = this.datatypes.get(type); if (handler != null) { return handler.createConstant(lexicalForm); @@ -148,9 +148,9 @@ private Constant parseDatatypeConstant(String lexicalForm, String datatype) thro * * @return this */ - public ParserConfiguration registerDatatype(String name, DatatypeConstantHandler handler) + public ParserConfiguration registerDatatype(final String name, final DatatypeConstantHandler handler) throws IllegalArgumentException { - Validate.isTrue(!datatypes.containsKey(name), "The Data type \"%s\" is already registered.", name); + Validate.isTrue(!this.datatypes.containsKey(name), "The Data type \"%s\" is already registered.", name); this.datatypes.put(name, handler); return this; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 1b9deccaa..c4a012baf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.parser.javacc; +import java.io.ByteArrayInputStream; + /*- * #%L * vlog4j-parser @@ -21,7 +23,6 @@ */ import java.io.InputStream; -import java.io.ByteArrayInputStream; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -29,23 +30,23 @@ import org.semanticweb.vlog4j.parser.RuleParser; /** - * Factory for creating a SubParser sharing configuration, state, and - * prefixes, but with an independent input stream, to be used, e.g., - * for parsing arguments in data source declarations. + * Factory for creating a SubParser sharing configuration, state, and prefixes, + * but with an independent input stream, to be used, e.g., for parsing arguments + * in data source declarations. 
* * @author Maximilian Marx */ public class SubParserFactory { - private KnowledgeBase knowledgeBase; - private ParserConfiguration parserConfiguration; - private PrefixDeclarations prefixDeclarations; + private final KnowledgeBase knowledgeBase; + private final ParserConfiguration parserConfiguration; + private final PrefixDeclarations prefixDeclarations; /** * Construct a SubParserFactory. * - * @argument parser the parser instance to get the state from. + * @param parser the parser instance to get the state from. */ - SubParserFactory(JavaCCParser parser) { + SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); this.prefixDeclarations = parser.getPrefixDeclarations(); this.parserConfiguration = parser.getParserConfiguration(); @@ -54,14 +55,14 @@ public class SubParserFactory { /** * Create a new parser with the specified state and given input. * - * @argument inputStream the input stream to parse. - * @argument encoding encoding of the input stream. + * @param inputStream the input stream to parse. + * @param encoding encoding of the input stream. * - * @return A new {@link JavaCCParser} bound to inputStream and - * with the specified parser state. + * @return A new {@link JavaCCParser} bound to inputStream and with the + * specified parser state. 
*/ public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { - JavaCCParser subParser = new JavaCCParser(inputStream, encoding); + final JavaCCParser subParser = new JavaCCParser(inputStream, encoding); subParser.setKnowledgeBase(this.knowledgeBase); subParser.setPrefixDeclarations(this.prefixDeclarations); subParser.setParserConfiguration(this.parserConfiguration); @@ -70,10 +71,10 @@ public JavaCCParser makeSubParser(final InputStream inputStream, final String en } public JavaCCParser makeSubParser(final InputStream inputStream) { - return makeSubParser(inputStream, RuleParser.DEFAULT_STRING_ENCODING); + return this.makeSubParser(inputStream, RuleParser.DEFAULT_STRING_ENCODING); } public JavaCCParser makeSubParser(final String string) { - return makeSubParser(new ByteArrayInputStream(string.getBytes())); + return this.makeSubParser(new ByteArrayInputStream(string.getBytes())); } } From 7ae5eb3409d904af9e57e621e9ca385de00b466f Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 6 Dec 2019 19:54:36 +0100 Subject: [PATCH 0710/1255] change to snapshot version --- coverage/pom.xml | 14 +++++++------- pom.xml | 2 +- vlog4j-client/pom.xml | 2 +- vlog4j-core/pom.xml | 2 +- vlog4j-examples/pom.xml | 2 +- vlog4j-graal/pom.xml | 2 +- vlog4j-owlapi/pom.xml | 2 +- vlog4j-parser/pom.xml | 2 +- vlog4j-rdf/pom.xml | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 0f2270cb3..16e01e7f8 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT coverage @@ -16,32 +16,32 @@ org.semanticweb.vlog4j vlog4j-core - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-rdf - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-owlapi - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-graal - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-parser - 0.5.0 + 0.6.0-SNAPSHOT org.semanticweb.vlog4j vlog4j-client - 
0.5.0 + 0.6.0-SNAPSHOT diff --git a/pom.xml b/pom.xml index 54f123629..e7754337d 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT pom VLog4j diff --git a/vlog4j-client/pom.xml b/vlog4j-client/pom.xml index 7bfe4fc43..0e7b14f00 100644 --- a/vlog4j-client/pom.xml +++ b/vlog4j-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-client diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index b3c3183c3..502f9053c 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-core diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml index aa3132d08..1e23eb09e 100644 --- a/vlog4j-examples/pom.xml +++ b/vlog4j-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-examples diff --git a/vlog4j-graal/pom.xml b/vlog4j-graal/pom.xml index a5b6724e3..7487cf100 100644 --- a/vlog4j-graal/pom.xml +++ b/vlog4j-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-graal diff --git a/vlog4j-owlapi/pom.xml b/vlog4j-owlapi/pom.xml index 2bb0a6834..3433c9bee 100644 --- a/vlog4j-owlapi/pom.xml +++ b/vlog4j-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-owlapi diff --git a/vlog4j-parser/pom.xml b/vlog4j-parser/pom.xml index 14bce0278..b8a7b07f4 100644 --- a/vlog4j-parser/pom.xml +++ b/vlog4j-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-parser diff --git a/vlog4j-rdf/pom.xml b/vlog4j-rdf/pom.xml index cd75da4e2..45c74ed50 100644 --- a/vlog4j-rdf/pom.xml +++ b/vlog4j-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.vlog4j vlog4j-parent - 0.5.0 + 0.6.0-SNAPSHOT vlog4j-rdf From 4480d3590ac591ccd7c21e5c5da1a8fd4a6e3eb7 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 10 Dec 2019 14:20:14 
+0100 Subject: [PATCH 0711/1255] fix unit tests failing on Windows OS --- .../vlog4j/core/model/implementation/Serializer.java | 9 ++++++++- .../vlog4j/core/model/DataSourceDeclarationTest.java | 8 +++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index b3c9fa9f7..a95e54529 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -334,7 +334,14 @@ private static String getIRIString(final String string) { } } - private static String escape(final String string) { + /** + * Escapes (with '\') special character occurrences in given {@code string}. The + * special characters are: "\", "'", "\t", "\b", "\n", "\r", "\f". + * + * @param string + * @return an escaped string + */ + public static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index c3ebcd4fb..a2124804a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import 
org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -90,8 +91,9 @@ public void toString_CsvFileDataSource() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = relativeDirName + File.separator + fileName; - assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); + final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); + assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", + dataSourceDeclaration.toString()); } // FIXME: have String representation of files OS independent @@ -116,7 +118,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = relativeDirName + File.separator + fileName; + final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); assertEquals("@source q[1]: load-rdf(\"" + expectedFilePath + "\") .", dataSourceDeclaration.toString()); } } From d44efaf01cfd1a909961c5b56cf6e02e5c667eb8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 10 Dec 2019 15:20:58 +0100 Subject: [PATCH 0712/1255] make Serializer.escape private --- .../core/model/implementation/Serializer.java | 48 ++++++++++++++++--- .../core/model/DataSourceDeclarationTest.java | 8 ++-- 2 files changed, 45 insertions(+), 11 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a95e54529..0e37fd672 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -177,7 +177,7 @@ public static String getString(final AbstractConstant constant) { * {@link LanguageStringConstant}. */ public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return addQuotes(escape(languageStringConstant.getString())) + AT + languageStringConstant.getLanguageTag(); + return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); } /** @@ -191,7 +191,7 @@ public static String getConstantName(final LanguageStringConstant languageString */ public static String getString(final DatatypeConstant datatypeConstant) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { - return addQuotes(escape(datatypeConstant.getLexicalValue())); + return getString(datatypeConstant.getLexicalValue()); } else { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) @@ -213,7 +213,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * {@link DatatypeConstant}. 
*/ public static String getConstantName(final DatatypeConstant datatypeConstant) { - return addQuotes(escape(datatypeConstant.getLexicalValue())) + DOUBLE_CARET + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + addAngleBrackets(datatypeConstant.getDatatype()); } @@ -322,7 +322,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource fileDataSource) { - return addQuotes(escape(fileDataSource.getFile().toString())); + return getString(fileDataSource.getFile().toString()); } private static String getIRIString(final String string) { @@ -335,13 +335,46 @@ private static String getIRIString(final String string) { } /** - * Escapes (with '\') special character occurrences in given {@code string}. The - * special characters are: "\", "'", "\t", "\b", "\n", "\r", "\f". + * Constructs the parseable, serialized representation of given {@code string}. + * Escapes (with {@code \}) special character occurrences in given + * {@code string}, and surrounds the result with double quotation marks + * ({@code "}). The special characters are: + *
+ * * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -250,7 +270,9 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule + * syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -265,7 +287,9 @@ public static String getConstantName(final DatatypeConstant datatypeConstant, * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule + * syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -278,7 +302,9 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -290,7 +316,9 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -302,7 +330,9 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see Rule syntax . + * @see Rule + * syntax . 
* @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -313,7 +343,9 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -324,7 +356,9 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. @@ -337,7 +371,9 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see Rule syntax .. + * @see Rule + * syntax .. * * @param csvFileDataSource * @return String representation corresponding to a given @@ -350,7 +386,9 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see Rule syntax .. + * @see Rule + * syntax .. * * * @param rdfFileDataSource @@ -365,7 +403,9 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see Rule syntax . + * @see Rule + * syntax . 
* * * @param dataSource @@ -461,7 +501,7 @@ public static String getFactString(Predicate predicate, List terms) { } public static String getFactString(Predicate predicate, List terms, Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + "\n"; + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; } public static String getString(Predicate predicate, List terms) { @@ -486,6 +526,15 @@ public static String getString(Predicate predicate, List terms, Function sb.append(getString(dataSource) + NEW_LINE)); + knowledgeBase.getRules().forEach(rule -> sb.append(getString(rule) + NEW_LINE)); + knowledgeBase.getFacts().forEach(fact -> sb.append(getFactString(fact) + NEW_LINE)); + + return sb.toString(); + } + public static String getBaseString(KnowledgeBase knowledgeBase) { String baseIri = knowledgeBase.getBaseIri(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index e125d3267..79b9a520d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -22,8 +22,10 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.io.OutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -40,6 +42,7 @@ import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import 
org.semanticweb.rulewerk.core.model.api.Predicate; @@ -48,6 +51,7 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -59,7 +63,7 @@ * @author Markus Kroetzsch * */ -public class KnowledgeBase implements Iterable { +public class KnowledgeBase implements Entity, Iterable { private final Set listeners = new HashSet<>(); @@ -555,4 +559,33 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE public String unresolveAbsoluteIri(String iri) { return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); } + + @Override + public String getSyntacticRepresentation() { + return Serializer.getString(this); + } + + /** + * Serialise the KnowledgeBase to the {@link OutputStream}. + * + * @param stream the {@link OutputStream} to serialise to. + * + * @throws IOException + */ + public void writeKnowledgeBase(OutputStream stream) throws IOException { + stream.write(getSyntacticRepresentation().getBytes()); + } + + /** + * Serialise the KnowledgeBase to the given {@link File}. + * + * @param filePath path to the file to serialise into. 
+ * + * @throws IOException + */ + public void writeKnowledgeBase(String filePath) throws IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + writeKnowledgeBase(stream); + } + } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index b44f89c24..1fd02f50a 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,6 +22,9 @@ import static org.junit.Assert.*; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; import java.util.Arrays; import org.junit.Before; @@ -117,4 +120,11 @@ public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationExc assertEquals(this.kb.resolvePrefixedName("ex:test"), iri + "test"); assertEquals(this.kb.unresolveAbsoluteIri(iri + "test"), "ex:test"); } + + @Test + public void writeKnowledgeBase_justFacts_succeeds() throws IOException { + OutputStream stream = new ByteArrayOutputStream(); + this.kb.writeKnowledgeBase(stream); + assertEquals("P(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + } } From 28a4d53c45a191d7ba43ec69917a15a3921cb389 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 3 Mar 2020 18:03:08 +0100 Subject: [PATCH 0844/1255] Core: Make KnowledgeBase.writeKnowledgeBase() lazy --- .../core/model/implementation/Serializer.java | 9 ----- .../rulewerk/core/reasoner/KnowledgeBase.java | 27 +++++++++----- .../core/reasoner/KnowledgeBaseTest.java | 35 +++++++++++++++++++ 3 files changed, 53 insertions(+), 18 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index ef0c9f766..55869f980 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -526,15 +526,6 @@ public static String getString(Predicate predicate, List terms, Function sb.append(getString(dataSource) + NEW_LINE)); - knowledgeBase.getRules().forEach(rule -> sb.append(getString(rule) + NEW_LINE)); - knowledgeBase.getFacts().forEach(fact -> sb.append(getFactString(fact) + NEW_LINE)); - - return sb.toString(); - } - public static String getBaseString(KnowledgeBase knowledgeBase) { String 
baseIri = knowledgeBase.getBaseIri(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 79b9a520d..09ce844cc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -42,7 +42,6 @@ import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; @@ -63,7 +62,7 @@ * @author Markus Kroetzsch * */ -public class KnowledgeBase implements Entity, Iterable { +public class KnowledgeBase implements Iterable { private final Set listeners = new HashSet<>(); @@ -473,7 +472,7 @@ public interface AdditionalInputParser { * @throws IOException when reading {@code file} fails * @throws IllegalArgumentException when {@code file} is null or has already * been imported - * @throws RulewerkException when parseFunction throws RulewerkException + * @throws RulewerkException when parseFunction throws RulewerkException */ public void importRulesFile(File file, AdditionalInputParser parseFunction) throws RulewerkException, IOException, IllegalArgumentException { @@ -560,11 +559,6 @@ public String unresolveAbsoluteIri(String iri) { return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); } - @Override - public String getSyntacticRepresentation() { - return Serializer.getString(this); - } - /** * Serialise the KnowledgeBase to the {@link OutputStream}. 
* @@ -573,7 +567,22 @@ public String getSyntacticRepresentation() { * @throws IOException */ public void writeKnowledgeBase(OutputStream stream) throws IOException { - stream.write(getSyntacticRepresentation().getBytes()); + stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); + + for (DataSourceDeclaration dataSource : getDataSourceDeclarations()) { + stream.write(Serializer.getString(dataSource).getBytes()); + stream.write('\n'); + } + + for (Rule rule : getRules()) { + stream.write(Serializer.getString(rule).getBytes()); + stream.write('\n'); + } + + for (Fact fact : getFacts()) { + stream.write(Serializer.getFactString(fact).getBytes()); + stream.write('\n'); + } } /** diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 1fd02f50a..15c633f86 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -25,6 +25,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.net.URL; import java.util.Arrays; import org.junit.Before; @@ -32,8 +33,12 @@ import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class KnowledgeBaseTest { 
@@ -41,6 +46,11 @@ public class KnowledgeBaseTest { private final Fact fact1 = Expressions.makeFact("P", Expressions.makeAbstractConstant("c")); private final Fact fact2 = Expressions.makeFact("P", Expressions.makeAbstractConstant("d")); private final Fact fact3 = Expressions.makeFact("Q", Expressions.makeAbstractConstant("c")); + private final PositiveLiteral literal1 = Expressions.makePositiveLiteral("P", + Expressions.makeUniversalVariable("X")); + private final PositiveLiteral literal2 = Expressions.makePositiveLiteral("Q", + Expressions.makeUniversalVariable("X")); + private final Rule rule = Expressions.makeRule(literal1, literal2); @Before public void initKB() { @@ -127,4 +137,29 @@ public void writeKnowledgeBase_justFacts_succeeds() throws IOException { this.kb.writeKnowledgeBase(stream); assertEquals("P(c) .\nP(d) .\nQ(c) .\n", stream.toString()); } + + @Test + public void writeKnowledgeBase_withBase_succeeds() throws IOException { + String baseIri = "https://example.org/"; + MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(baseIri); + this.kb.mergePrefixDeclarations(prefixDeclarations); + OutputStream stream = new ByteArrayOutputStream(); + this.kb.writeKnowledgeBase(stream); + assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + } + + @Test + public void writeKnowledgeBase_alsoRuleAndDataSource_succeeds() throws IOException { + String sparqlIri = "https://example.org/sparql"; + String sparqlBgp = "?X ?p []"; + this.kb.addStatement(rule); + this.kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("S", 1), + new SparqlQueryResultDataSource(new URL(sparqlIri), "?X", sparqlBgp))); + + OutputStream stream = new ByteArrayOutputStream(); + this.kb.writeKnowledgeBase(stream); + assertEquals("@source S[1]: sparql(<" + sparqlIri + ">, \"?X\", \"" + sparqlBgp + + "\") .\nP(?X) :- Q(?X) .\nP(c) .\nP(d) .\nQ(c) .\n", 
stream.toString()); + } } From 694458a4e56acb2bfcee9c51caf908a58f00ca2d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Mar 2020 14:29:24 +0100 Subject: [PATCH 0845/1255] Core: Fix handling of FileDataSources with backslashes in their name --- .../rulewerk/core/reasoner/implementation/FileDataSource.java | 4 ++-- .../rulewerk/core/model/DataSourceDeclarationTest.java | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index d65bc7af1..4d9e0ea82 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -68,8 +68,8 @@ public FileDataSource(final String filePath, final Iterable possibleExte Validate.notBlank(filePath, "Data source file name cannot be null!"); this.file = new File(filePath); - this.filePath = filePath.replaceAll("\\\\", "/"); // canonicalise windows-style path separators - this.fileName = this.filePath.substring(this.filePath.lastIndexOf("/") + 1); // just the file name + this.filePath = filePath; // unmodified file path, necessary for correct serialisation + this.fileName = this.file.getName(); this.extension = getValidExtension(this.fileName, possibleExtensions); this.fileNameWithoutExtension = this.fileName.substring(0, this.fileName.lastIndexOf(this.extension)); this.dirCanonicalPath = Paths.get(file.getCanonicalPath()).getParent().toString(); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 7c5ad3cba..93f52f6c4 100644 --- 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -99,10 +99,11 @@ public void toString_CsvFileDataSource() throws IOException { public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { final Predicate predicate = Expressions.makePredicate("q", 1); final String absoluteFilePathWindows = "D:\\input\\file.csv"; + final String escapedPath = absoluteFilePathWindows.replace("\\", "\\\\"); final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(absoluteFilePathWindows); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); - assertEquals("@source q[1]: load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(\"" + escapedPath + "\") .", dataSourceDeclaration.toString()); } @Test From 439ff8ee816ab07f9e388abf16bf710185267a4f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 4 Mar 2020 14:52:40 +0100 Subject: [PATCH 0846/1255] Core: Improve validation in FileDataSource --- .../rulewerk/core/model/implementation/Serializer.java | 2 +- .../rulewerk/core/reasoner/implementation/FileDataSource.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 55869f980..a77634d3c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -420,7 +420,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource 
fileDataSource) { - return getString(fileDataSource.getPath().toString()); + return getString(fileDataSource.getPath()); } private static String getIRIString(final String string) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 4d9e0ea82..ec902eb00 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -65,7 +65,7 @@ public abstract class FileDataSource extends VLogDataSource { * {@code possibleExtensions}. */ public FileDataSource(final String filePath, final Iterable possibleExtensions) throws IOException { - Validate.notBlank(filePath, "Data source file name cannot be null!"); + Validate.notBlank(filePath, "Data source file path cannot be blank!"); this.file = new File(filePath); this.filePath = filePath; // unmodified file path, necessary for correct serialisation From f6be085c2e176b14fbff55822f3937ad208d1827 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 5 Mar 2020 19:49:21 +0100 Subject: [PATCH 0847/1255] Address further review comments --- .../model/api/PrefixDeclarationRegistry.java | 19 ++++++--- .../AbstractPrefixDeclarationRegistry.java | 8 ++-- .../MergingPrefixDeclarationRegistry.java | 39 +++++++++++-------- .../core/model/implementation/Serializer.java | 2 +- .../implementation/Skolemization.java | 36 +++++++---------- .../parser/ConfigurableLiteralHandler.java | 12 ++++-- .../rulewerk/parser/DirectiveHandler.java | 38 +++++++----------- .../LocalPrefixDeclarationRegistry.java | 22 ++++++----- .../rulewerk/parser/ParserConfiguration.java | 2 +- .../ImportFileDirectiveHandler.java | 8 ++-- .../ImportFileRelativeDirectiveHandler.java | 9 +++-- .../parser/javacc/JavaCCParserBase.java | 8 ++-- 12 
files changed, 109 insertions(+), 94 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index b9dc7386a..2ec1c15bc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -82,16 +82,25 @@ public interface PrefixDeclarationRegistry extends Iterableprefixed + * name into an absolute IRI. + * + * @param prefixedName a prefixed name of the form prefixName:localName. + * + * @throws PrefixDeclarationException when the prefixName has not been declared. + * @return an absolute IRI corresponding to prefixedName. */ String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; /** * Turn a potentially relative IRI into an absolute IRI. * - * @param potentiallyRelativeIri an IRI. - * @throws PrefixDeclarationException when called on a prefixedName using an - * unknown prefixName. + * @param relativeOrAbsoluteIri an IRI that may be relative or absolute. + * @throws PrefixDeclarationException when relativeOrAbsoluteIri is not a valid + * IRI. + * + * @return when relativeOrAbsoluteIri is an absolute IRI, it is returned as-is. + * Otherwise, the current base IRI is prepended. 
*/ - String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarationException; + String absolutizeIri(String relativeOrAbsoluteIri) throws PrefixDeclarationException; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 48afd6a1e..892f2d33b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -44,15 +44,15 @@ public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclara /** * Iri holding the base namespace. */ - protected String baseUri = null; + protected String baseIri = null; @Override public String getBaseIri() { - if (baseUri == null) { - baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + if (baseIri == null) { + baseIri = PrefixDeclarationRegistry.EMPTY_BASE; } - return baseUri; + return baseIri; } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 94570bd0a..11e42fb88 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -27,6 +27,7 @@ import java.util.Map; import java.util.Map.Entry; +import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** @@ -41,12 +42,13 @@ final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclar /** * Next 
index to use for generated prefix names. */ - private long nextIndex = 0; + private Integer nextIndex = 0; /** - * Prefix string to use for generated prefix name + * Template string to use for generated prefix name */ - private static final String GENERATED_PREFIX_PREFIX_STRING = "rulewerk_generated_"; + private static final String GENERATED_PREFIX_TEMPLATE = "rulewerk_generated_%d" + + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; public MergingPrefixDeclarationRegistry() { super(); @@ -59,26 +61,28 @@ public MergingPrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDe /** * Sets the base namespace to the given value. If a base Iri has already been - * set, it will be added as a prefix declaration with a fresh prefixName. + * set, one of them will be added as a prefix declaration with a fresh + * prefixName. * * @param baseIri the new base namespace. */ @Override public void setBaseIri(String baseIri) { - if (baseIri == this.baseUri) { + Validate.notNull(baseIri, "baseIri must not be null"); + if (baseIri == this.baseIri) { return; } - if (this.baseUri == null) { - this.baseUri = baseIri; - } else if (this.baseUri == PrefixDeclarationRegistry.EMPTY_BASE) { + if (this.baseIri == null) { + this.baseIri = baseIri; + } else if (this.baseIri == PrefixDeclarationRegistry.EMPTY_BASE) { // we need to keep the empty base, so that we don't // accidentally relativise absolute Iris to // baseIri. Hence, introduce baseIri as a fresh prefix. 
prefixes.put(getFreshPrefix(), baseIri); } else { - prefixes.put(getFreshPrefix(), this.baseUri); - this.baseUri = baseIri; + prefixes.put(getFreshPrefix(), this.baseIri); + this.baseIri = baseIri; } } @@ -109,7 +113,7 @@ public String unresolveAbsoluteIri(String iri) { String baseIri = getBaseIri(); if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseIri) && !iri.equals(baseIri)) { - matches.put(iri.replaceFirst(baseUri, PrefixDeclarationRegistry.EMPTY_BASE), baseUri.length()); + matches.put(iri.replaceFirst(baseIri, PrefixDeclarationRegistry.EMPTY_BASE), baseIri.length()); } prefixes.forEach((prefixName, prefixIri) -> { @@ -145,13 +149,16 @@ public void mergePrefixDeclarations(final PrefixDeclarationRegistry other) { } } + private String getNextFreshPrefixCandidate() { + return String.format(GENERATED_PREFIX_TEMPLATE, this.nextIndex++); + } + private String getFreshPrefix() { - for (long idx = nextIndex; true; ++idx) { - String freshPrefix = GENERATED_PREFIX_PREFIX_STRING + idx + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; + while (true) { + String candidate = getNextFreshPrefixCandidate(); - if (!prefixes.containsKey(freshPrefix)) { - this.nextIndex = idx + 1; - return freshPrefix; + if (!prefixes.containsKey(candidate)) { + return candidate; } } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index a77634d3c..a788f3367 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -277,7 +277,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * @return String representation corresponding to a given * {@link DatatypeConstant}. 
*/ - public static String getConstantName(final DatatypeConstant datatypeConstant, + private static String getConstantName(final DatatypeConstant datatypeConstant, Function iriTransformer) { return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + getIRIString(datatypeConstant.getDatatype(), iriTransformer); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index d11d26849..3cd07bcb6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,19 +21,16 @@ */ import java.io.ByteArrayOutputStream; -import java.io.IOException; import java.util.UUID; -import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** - * A class that implements skolemization of named null names. The same - * name should be skolemized to the same {@link NamedNull} when - * skolemized using the same instance, but to different instances of - * {@link NamedNull} when skolemized using different instances of - * {@link Skolemization}. + * A class that implements skolemization of named null names. 
The same name + * should be skolemized to the same {@link NamedNull} when skolemized using the + * same instance, but two different instances of {@link NamedNull} when + * skolemized using different instances of {@link Skolemization}. * * @author Maximilian Marx */ @@ -44,21 +41,18 @@ public class Skolemization { private final byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); /** - * Skolemize a named null name. The same {@code name} will map to - * a {@link RenamedNamedNull} instance with the same name when - * called on the same instance. + * Skolemize a named null name. The same {@code name} will map to a + * {@link RenamedNamedNull} instance with the same name when called on the same + * instance. * - * @return a {@link RenamedNamedNull} instance with a new name - * that is specific to this instance and {@code name}. + * @return a {@link RenamedNamedNull} instance with a new name that is specific + * to this instance and {@code name}. */ public RenamedNamedNull skolemizeNamedNull(String name) { + byte[] nameBytes = name.getBytes(); ByteArrayOutputStream stream = new ByteArrayOutputStream(); - try { - stream.write(namedNullNamespace); - stream.write(name.getBytes()); - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); - } catch (IOException e) { - throw new RulewerkRuntimeException(e.getMessage(), e); - } + stream.write(namedNullNamespace, 0, namedNullNamespace.length); + stream.write(nameBytes, 0, nameBytes.length); + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index 0bf3ea7be..5b8fddbdd 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java +++ 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,11 @@ import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** - * Handler for parsing a configurable literal expression. + * Handler for parsing a configurable literal expression. Note that these are + * not Literals in the logical sense (i.e., Atoms with a specific polarity), but + * rather expressions in the sense of + * RDF literals; + * essentially adding further quoted literals with custom delimiters. * * @author Maximilian Marx */ @@ -38,7 +42,7 @@ public interface ConfigurableLiteralHandler { * parser's state, but bound to new input. * * @throws ParsingException when the given syntactic form is invalid. - * @return an appropriate @{link Constant} instance. + * @return an appropriate @{link Term} instance. */ public Term parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 01c9fc73c..146faf39c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,6 +25,7 @@ import java.net.MalformedURLException; import java.net.URI; import java.net.URL; +import java.nio.file.InvalidPathException; import java.util.List; import java.util.NoSuchElementException; @@ -58,7 +59,7 @@ public T handleDirective(List arguments, final SubParserFacto throws ParsingException; /** - * Validate the provided number of arguments to the data source. + * Validate the provided number of arguments to the directive statement. * * @param arguments Arguments given to the Directive statement. * @param number expected number of arguments @@ -87,23 +88,20 @@ public static void validateNumberOfArguments(final List argum */ public static String validateStringArgument(final DirectiveArgument argument, final String description) throws ParsingException { - try { - return argument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); - } + return argument.fromString() + .orElseThrow(() -> new ParsingException("description \"" + argument + "\" is not a string.")); } /** - * Validate that the provided argument is a file name. + * Validate that the provided argument is a file path. * * @param argument the argument to validate * @param description a description of the argument, used in constructing the * error message. * - * @throws ParsingException when the given argument is not a valid file name. + * @throws ParsingException when the given argument is not a valid file path. * - * @return the File corresponding to the contained file name. + * @return the File corresponding to the contained file path. 
*/ public static File validateFilenameArgument(final DirectiveArgument argument, final String description) throws ParsingException { @@ -111,8 +109,8 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi File file = new File(fileName); try { // we don't care about the actual path, just that there is one. - file.getCanonicalPath(); - } catch (IOException e) { + file.toPath(); + } catch (InvalidPathException e) { throw new ParsingException(description + "\"" + argument + "\" is not a valid file path.", e); } @@ -132,11 +130,8 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi */ public static URI validateIriArgument(final DirectiveArgument argument, final String description) throws ParsingException { - try { - return argument.fromIri().get(); - } catch (NoSuchElementException e) { - throw new ParsingException(description + "\"" + argument + "\" is not an IRI.", e); - } + return argument.fromIri() + .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not an IRI.")); } /** @@ -173,11 +168,8 @@ public static URL validateUrlArgument(final DirectiveArgument argument, final St */ public static Term validateTermArgument(final DirectiveArgument argument, final String description) throws ParsingException { - try { - return argument.fromTerm().get(); - } catch (NoSuchElementException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); - } + return argument.fromTerm() + .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not a Term.")); } /** diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index a72def47d..1f13e7799 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.parser; +import org.apache.commons.lang3.Validate; + /*- * #%L * Rulewerk Parser @@ -56,21 +58,22 @@ public LocalPrefixDeclarationRegistry() { */ public LocalPrefixDeclarationRegistry(String fallbackIri) { super(); + Validate.notNull(fallbackIri, "fallbackIri must not be null"); this.fallbackIri = fallbackIri; } /** * Returns the relevant base namespace. Returns the fallback IRI if no base - * namespace has been set yet. + * namespace has been set yet, and sets that as the base IRI. * * @return string of an absolute base IRI */ @Override public String getBaseIri() { - if (this.baseUri == null) { - this.baseUri = this.fallbackIri; + if (this.baseIri == null) { + this.baseIri = this.fallbackIri; } - return baseUri.toString(); + return baseIri; } @Override @@ -87,15 +90,16 @@ public void setPrefixIri(String prefixName, String prefixIri) throws PrefixDecla * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. 
* - * @param baseUri the new base namespace + * @param baseIri the new base namespace * @throws PrefixDeclarationException if base was already defined */ @Override - public void setBaseIri(String baseUri) throws PrefixDeclarationException { - if (this.baseUri != null) + public void setBaseIri(String baseIri) throws PrefixDeclarationException { + Validate.notNull(baseIri, "baseIri must not be null"); + if (this.baseIri != null) throw new PrefixDeclarationException( - "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); - this.baseUri = baseUri; + "Base is already defined as <" + this.baseIri + "> and cannot be re-defined as " + baseIri); + this.baseIri = baseIri; } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 5843f1db7..8213f30a1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -299,7 +299,7 @@ public ParserConfiguration disallowNamedNulls() { * Whether parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull} is * allowed. * - * @return this + * @return true iff parsing of NamedNulls is allowed. 
*/ public boolean isParsingOfNamedNullsAllowed() { return this.allowNamedNulls; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 337475363..9c24f2ab1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,9 +21,11 @@ */ import java.io.File; +import java.io.IOException; import java.io.InputStream; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.DirectiveHandler; @@ -51,7 +53,7 @@ public KnowledgeBase handleDirective(List arguments, final Su knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration); }); - } catch (Exception e) { + } catch (RulewerkException | IOException | IllegalArgumentException e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 1de8df9f5..62b5e246f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,9 +21,12 @@ */ import java.io.File; +import java.io.IOException; import java.io.InputStream; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.DirectiveArgument; @@ -52,7 +55,7 @@ public KnowledgeBase handleDirective(List arguments, SubParse knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarationRegistry.getBaseIri()); }); - } catch (Exception e) { + } catch (RulewerkException | IOException | IllegalArgumentException e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 3e38aefde..a7878d797 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -148,8 +148,8 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { /** * Creates a suitable {@link Constant} from the parsed data. 
* - * @param string the string data (unescaped) - * @param datatype the datatype, or null if not provided + * @param lexicalForm the string data (unescaped) + * @param datatype the datatype, or null if not provided * @return suitable constant */ Constant createConstant(String lexicalForm, String datatype) throws ParseException { From 95254588c20452f85a9dabf76f878307359e0020 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Sun, 8 Mar 2020 04:38:54 +0100 Subject: [PATCH 0848/1255] Core: Fix typo. --- .../rulewerk/core/reasoner/implementation/Skolemization.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 3cd07bcb6..f60081c15 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -29,7 +29,7 @@ /** * A class that implements skolemization of named null names. The same name * should be skolemized to the same {@link NamedNull} when skolemized using the - * same instance, but two different instances of {@link NamedNull} when + * same instance, but to two different instances of {@link NamedNull} when * skolemized using different instances of {@link Skolemization}. * * @author Maximilian Marx From c941c8ab2cc330c8910f9a08804d8e7cc52fc725 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 16 Mar 2020 11:32:47 +0100 Subject: [PATCH 0849/1255] Core: Fix script to build local VLog --- build-vlog-library.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index 02ecd116f..e82a046e0 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -21,6 +21,6 @@ else cd ../../.. 
fi -mkdir local_builds/jvlog.jar rulewerk-core/lib +mkdir -p rulewerk-core/lib cp local_builds/jvlog.jar rulewerk-core/lib/jvlog-local.jar mvn initialize -Pdevelopment From acc9f03947b5ce284042cf465509a0e899d4f1cf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 15:31:03 +0200 Subject: [PATCH 0850/1255] Fix search box when building javadoc usig JDK9+ Fixes #170. --- pom.xml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pom.xml b/pom.xml index d3e85c1e7..944c4f5fb 100644 --- a/pom.xml +++ b/pom.xml @@ -407,6 +407,24 @@ + + java-9 + + [9,) + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven.javadoc.version} + + --no-module-directories + + + + + From 3022f41553c8ff8a81a3aed7bc3176284c9e9f7d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 15:31:31 +0200 Subject: [PATCH 0851/1255] Fix javadoc warnings --- .../model/implementation/Expressions.java | 2 +- .../core/model/implementation/Serializer.java | 72 +++++-------------- .../rulewerk/core/reasoner/KnowledgeBase.java | 4 +- .../owlapi/AbstractClassToRuleConverter.java | 18 ++--- .../parser/DatatypeConstantHandler.java | 6 +- .../LocalPrefixDeclarationRegistry.java | 2 +- .../rulewerk/parser/ParserConfiguration.java | 3 +- 7 files changed, 35 insertions(+), 72 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index 74529fb51..d1b62815a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -302,7 +302,7 @@ public static Conjunction makeConjunction(final Literal... 
literals) { } /** - * Creates a {@code Conjunction} of {@link T} ({@link PositiveLiteral} type) + * Creates a {@code Conjunction} of {@code T} ({@link PositiveLiteral} type) * objects. * * @param literals list of non-null positive literals diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index a788f3367..b4c6d07cd 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -94,9 +94,7 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see Rule - * syntax . + * @see Rule syntax * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -108,9 +106,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see Rule - * syntax . + * @see Rule syntax * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -131,9 +127,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. * - * @see Rule - * syntax . + * @see Rule syntax * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ @@ -149,9 +143,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see Rule - * syntax . + * @see Rule syntax * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. 
*/ @@ -162,9 +154,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link AbstractConstant}. * - * @see Rule - * syntax . + * @see Rule syntax * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -177,9 +167,7 @@ public static String getString(final AbstractConstant constant, FunctionRule - * syntax . + * @see Rule syntax * @param constant a {@link AbstractConstant} * @return String representation corresponding to a given * {@link AbstractConstant}. @@ -192,9 +180,7 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see Rule - * syntax . + * @see Rule syntax * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -221,9 +207,7 @@ public static String getConstantName(final LanguageStringConstant languageString * IRI. * * - * @see Rule - * syntax . + * @see Rule syntax * @param datatypeConstant a {@link DatatypeConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -270,9 +254,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule - * syntax . + * @see Rule syntax * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -287,9 +269,7 @@ private static String getConstantName(final DatatypeConstant datatypeConstant, * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. 
* - * @see Rule - * syntax . + * @see Rule syntax * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -302,9 +282,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see Rule - * syntax . + * @see Rule syntax * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -316,9 +294,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see Rule - * syntax . + * @see Rule syntax * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -330,9 +306,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see Rule - * syntax . + * @see Rule syntax * @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -343,9 +317,7 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see Rule - * syntax . + * @see Rule syntax * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -356,9 +328,7 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. * - * @see Rule - * syntax . + * @see Rule syntax * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. 
@@ -371,9 +341,7 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see Rule - * syntax .. + * @see Rule syntax * * @param csvFileDataSource * @return String representation corresponding to a given @@ -386,9 +354,7 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see Rule - * syntax .. + * @see Rule syntax * * * @param rdfFileDataSource @@ -403,9 +369,7 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see Rule - * syntax . + * @see Rule syntax * * * @param dataSource diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 09ce844cc..6cadfa1af 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -534,7 +534,7 @@ public String getPrefixIri(String prefixName) throws PrefixDeclarationException /** * Resolve a prefixed name into an absolute IRI. Dual to - * {@link unresolveAbsoluteIri}. + * {@link KnowledgeBase#unresolveAbsoluteIri}. * * @param prefixedName the prefixed name to resolve. * @@ -548,7 +548,7 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE /** * Potentially abbreviate an absolute IRI using the declared prefixes. Dual to - * {@link resolvePrefixedName}. + * {@link KnowledgeBase#resolvePrefixedName}. * * @param iri the absolute IRI to abbreviate. 
* diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java index 7c8ab043c..b83e5c142 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -180,21 +180,21 @@ public AbstractClassToRuleConverter(final Term mainTerm, final SimpleConjunction } /** - * Returns true if the current rule is a tautology, i.e., has an unsatisfiable - * body or a tautological head. + * Check whether the current rule is a tautology. * - * @return + * @return true if the current rule is a tautology, i.e., has an + * unsatisfiable body or a tautological head. */ public boolean isTautology() { return this.body.isFalse() || this.head.isTrue(); } /** - * Returns true if the current rule represents a falsity, i.e., has a - * tautological (or non-existent) body and an unsatisfiable (or no-existent) - * head. + * Checks whether the current rule is a falsity. * - * @return + * @return true if the current rule represents a falsity, i.e., + * has a tautological (or non-existent) body and an unsatisfiable + * (or no-existent) head. 
*/ public boolean isFalsity() { return this.body.isTrueOrEmpty() && this.head.isFalseOrEmpty(); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java index bc94fc7ba..a64eac992 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -37,7 +37,7 @@ public interface DatatypeConstantHandler { * @throws ParsingException when the given representation is invalid for this * datatype. * - * @return + * @return a {@link DatatypeConstant} corresponding to the lexical form. */ public DatatypeConstant createConstant(String lexicalForm) throws ParsingException; } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index 1f13e7799..c61ec77c3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -43,7 +43,7 @@ final public class LocalPrefixDeclarationRegistry extends AbstractPrefixDeclarat /** * Construct a Prefix declaration registry without an inherited base IRI. 
In - * this case, we default to {@value PrefixDeclarationRegistry#EMPTY_BASE}. + * this case, we default to {@value org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry#EMPTY_BASE}. */ public LocalPrefixDeclarationRegistry() { this(PrefixDeclarationRegistry.EMPTY_BASE); // empty string encodes: "no base" (use relative IRIs) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 8213f30a1..41a551a75 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -78,8 +78,7 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see - * the grammar. + * @see the grammar * * @param name Name of the data source, as it appears in the declaring * directive. From ab6d612b4fa2429fa738183a210eab65052bc66e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 18 May 2020 12:26:52 +0200 Subject: [PATCH 0852/1255] Parser: Fix broken test on Windows --- .../rulewerk/parser/DirectiveHandlerTest.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index ef0a4eb6c..02f58e5ad 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,6 +21,7 @@ */ import static org.junit.Assert.*; +import java.io.File; import java.net.MalformedURLException; import java.net.URI; @@ -39,7 +40,7 @@ public class DirectiveHandlerTest { @Test public void validateStringArgument_stringArgument_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument"), STRING); + assertEquals(STRING, DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument")); } @Test(expected = ParsingException.class) @@ -54,7 +55,7 @@ public void validateStringArgument_termArgument_throws() throws ParsingException @Test public void validateIriArgument_iriArgument_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument"), IRI); + assertEquals(IRI, DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument")); } @Test(expected = ParsingException.class) @@ -69,7 +70,7 @@ public void validateIriArgument_termArgument_throws() throws ParsingException { @Test public void validateTermArgument_termArgument_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument"), TERM); + assertEquals(TERM, DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument")); } @Test(expected = ParsingException.class) @@ -84,7 +85,7 @@ public void validateTermArgument_iriArgument_throws() throws ParsingException { @Test public void validateFilenameArgument_filename_succeeds() throws ParsingException { - assertEquals(DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, "filename argument").getPath(), STRING); + assertEquals(new 
File(STRING), DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, "filename argument")); } @Test @@ -95,7 +96,7 @@ public void validateFilenameArgument_invalidFilename_throws() throws ParsingExce @Test public void validateUrlArgument_url_succeeds() throws ParsingException, MalformedURLException { - assertEquals(DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument"), IRI.toURL()); + assertEquals(IRI.toURL(), DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument")); } @Test(expected = ParsingException.class) From ca0851ea0828f1e23fc8e12a5867076b10f16d07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Larry=20Gonz=C3=A1lez?= Date: Mon, 18 May 2020 14:45:12 +0200 Subject: [PATCH 0853/1255] remove unused imports; use autoformat --- coverage/pom.xml | 103 +++++++++--------- .../implementation/CsvFileDataSource.java | 1 - .../implementation/RdfFileDataSource.java | 5 +- .../core/model/DataSourceDeclarationTest.java | 4 +- .../implementation/AddDataSourceTest.java | 4 +- .../FileDataSourceTestUtils.java | 2 - 6 files changed, 56 insertions(+), 63 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 977046572..7b75d4ebd 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -1,4 +1,5 @@ - 4.0.0 @@ -13,58 +14,58 @@ coverage - - org.semanticweb.rulewerk - rulewerk-core - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-rdf - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-owlapi - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-graal - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-parser - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-client - 0.6.0-SNAPSHOT - + + org.semanticweb.rulewerk + rulewerk-core + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-rdf + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-owlapi + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-graal + 0.6.0-SNAPSHOT + + + org.semanticweb.rulewerk + rulewerk-parser + 0.6.0-SNAPSHOT + + + 
org.semanticweb.rulewerk + rulewerk-client + 0.6.0-SNAPSHOT + - - - - org.eluder.coveralls - coveralls-maven-plugin - - - org.jacoco - jacoco-maven-plugin - - - aggregate-reports-ut - test - - report-aggregate - - - - - + + + + org.eluder.coveralls + coveralls-maven-plugin + + + org.jacoco + jacoco-maven-plugin + + + aggregate-reports-ut + test + + report-aggregate + + + + + diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 7b7812b4c..cb1dea326 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.Arrays; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index e56148544..265db485c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Optional; @@ -55,8 +54,8 @@ public class RdfFileDataSource extends FileDataSource { /** * Constructor. * - * @param rdfFile path to a file of a {@code .nt} or {@code .nt.gz} extension and a - * valid N-Triples format. + * @param rdfFile path to a file of a {@code .nt} or {@code .nt.gz} extension + * and a valid N-Triples format. * @throws IOException if the path of the given {@code rdfFile} is * invalid. 
* @throws IllegalArgumentException if the extension of the given diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 93f52f6c4..310c24715 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -23,7 +23,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; -import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -91,8 +90,7 @@ public void toString_CsvFileDataSource() throws IOException { unzippedCsvFileDataSource); final String expectedFilePath = Serializer.getString(relativeDirName + fileName); - assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", - dataSourceDeclaration.toString()); + assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index ce8ae45ef..2739ae08c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -23,7 +23,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; @@ -223,8 +222,7 @@ public void testAddDataSourceNoFactsForPredicate() throws IOException { public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); final 
DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); - final DataSource dataSource2 = new CsvFileDataSource( - FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"); + final DataSource dataSource2 = new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index ea714f865..dc027e2a0 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -3,8 +3,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import java.io.File; - /*- * #%L * Rulewerk Core Components From 5312938ae685f47b1edc8d1a0aff160f54cd31fd Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 06:35:00 +0200 Subject: [PATCH 0854/1255] Core: Split out VLog-specific code into a rulewerk-vlog --- RELEASE-NOTES.md | 5 + coverage/pom.xml | 65 +++--- pom.xml | 1 + rulewerk-client/pom.xml | 5 + .../rulewerk/client/picocli/ClientUtils.java | 4 +- .../client/picocli/PrintQueryResults.java | 4 +- .../client/picocli/RulewerkClient.java | 4 +- .../picocli/RulewerkClientMaterialize.java | 6 +- .../rulewerk/client/picocli/SaveModel.java | 4 +- .../client/picocli/SaveQueryResults.java | 4 +- rulewerk-core/pom.xml | 49 +---- .../IncompatiblePredicateArityException.java | 4 +- .../PrefixDeclarationException.java | 4 +- .../exceptions/ReasonerStateException.java | 4 +- .../core/exceptions/RulewerkException.java | 4 +- .../exceptions/RulewerkRuntimeException.java | 4 +- 
.../model/api/PrefixDeclarationRegistry.java | 4 +- .../implementation/AbstractLiteralImpl.java | 4 +- .../AbstractPrefixDeclarationRegistry.java | 4 +- .../model/implementation/Expressions.java | 4 +- .../MergingPrefixDeclarationRegistry.java | 4 +- .../implementation/NegativeLiteralImpl.java | 4 +- .../implementation/PositiveLiteralImpl.java | 4 +- .../model/implementation/PredicateImpl.java | 4 +- .../core/model/implementation/Serializer.java | 4 +- .../rulewerk/core/reasoner/KnowledgeBase.java | 4 +- .../rulewerk/core/reasoner/Reasoner.java | 21 +- .../implementation/CsvFileDataSource.java | 8 +- .../DataSourceConfigurationVisitor.java | 31 +++ .../EmptyQueryResultIterator.java | 2 +- .../implementation/FileDataSource.java | 28 +-- .../implementation/InMemoryDataSource.java | 25 ++- .../implementation/QueryAnswerCountImpl.java | 12 +- .../implementation/QueryResultImpl.java | 6 +- .../implementation/RdfFileDataSource.java | 9 +- ...ataSource.java => ReasonerDataSource.java} | 20 +- .../implementation/Skolemization.java | 4 +- .../SparqlQueryResultDataSource.java | 31 +-- .../core/model/DataSourceDeclarationTest.java | 4 +- .../MergingPrefixDeclarationRegistryTest.java | 4 +- .../core/reasoner/KnowledgeBaseTest.java | 4 +- .../implementation/QueryResultImplTest.java | 1 - rulewerk-examples/pom.xml | 5 + .../examples/CompareWikidataDBpedia.java | 2 +- .../rulewerk/examples/CountingTriangles.java | 2 +- .../rulewerk/examples/DoidExample.java | 2 +- .../rulewerk/examples/ExamplesUtils.java | 4 +- .../InMemoryGraphAnalysisExample.java | 2 +- .../examples/SimpleReasoningExample.java | 6 +- .../examples/core/AddDataFromCsvFile.java | 2 +- .../examples/core/AddDataFromRdfFile.java | 2 +- .../core/AddDataFromSparqlQueryResults.java | 5 +- .../core/ConfigureReasonerLogging.java | 7 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/AddDataFromDlgpFile.java | 7 +- .../examples/graal/AddDataFromGraal.java | 6 +- 
.../examples/graal/DoidExampleGraal.java | 6 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 6 +- .../examples/rdf/AddDataFromRdfModel.java | 6 +- .../rulewerk/graal/GraalConvertException.java | 4 +- .../graal/GraalToRulewerkModelConverter.java | 4 +- .../GraalToRulewerkModelConverterTest.java | 4 +- .../owlapi/OwlAxiomToRulesConverter.java | 4 +- .../OwlFeatureNotSupportedException.java | 4 +- .../owlapi/OwlToRulesConversionHelper.java | 4 +- rulewerk-parser/pom.xml | 1 - .../parser/ConfigurableLiteralHandler.java | 4 +- .../rulewerk/parser/DirectiveHandler.java | 4 +- .../LocalPrefixDeclarationRegistry.java | 4 +- .../rulewerk/parser/ParserConfiguration.java | 4 +- .../rulewerk/parser/ParsingException.java | 4 +- .../CsvFileDataSourceDeclarationHandler.java | 4 +- .../RdfFileDataSourceDeclarationHandler.java | 4 +- .../ImportFileDirectiveHandler.java | 4 +- .../ImportFileRelativeDirectiveHandler.java | 4 +- .../parser/javacc/JavaCCParserBase.java | 4 +- .../parser/RuleParserDataSourceTest.java | 4 +- rulewerk-rdf/pom.xml | 10 +- .../rulewerk/rdf/RdfValueToTermConverter.java | 4 +- .../rulewerk/rdf/TestReasonOverRdfFacts.java | 2 +- rulewerk-vlog/LICENSE.txt | 201 ++++++++++++++++++ rulewerk-vlog/pom.xml | 70 ++++++ .../reasoner/vlog}/ModelToVLogConverter.java | 8 +- .../reasoner/vlog}/TermToVLogConverter.java | 9 +- .../VLogDataSourceConfigurationVisitor.java | 73 +++++++ .../reasoner/vlog}/VLogKnowledgeBase.java | 13 +- .../vlog}/VLogQueryResultIterator.java | 6 +- .../rulewerk/reasoner/vlog}/VLogReasoner.java | 11 +- .../reasoner/vlog}/VLogToModelConverter.java | 16 +- .../src/test/data/input/binaryFacts.csv | 0 .../src/test/data/input/constantD.csv | 0 .../src/test/data/input/empty.csv | 0 .../test/data/input/invalidFormatNtFacts.nt | 0 .../src/test/data/input/ternaryFacts.nt | 0 .../test/data/input/ternaryFactsZipped.nt.gz | Bin .../src/test/data/input/unaryFacts.csv | 0 .../src/test/data/input/unaryFactsCD.csv | 0 .../test/data/input/unaryFactsZipped.csv.gz 
| Bin .../src/test/data/output/.keep | 0 .../src/test/data/output/binaryFacts.csv | 2 + .../src/test/data/output/exclude_blanks.csv | 0 .../src/test/data/output/include_blanks.csv | 2 + .../src/test/data/output/unaryFacts.csv | 2 + .../reasoner/vlog}/AddDataSourceTest.java | 9 +- .../reasoner/vlog}/AnswerQueryTest.java | 4 +- .../reasoner/vlog}/CsvFileDataSourceTest.java | 25 +-- .../vlog/ExportQueryResultToCsvFileTest.java | 5 +- .../vlog}/FileDataSourceTestUtils.java | 11 +- .../GeneratedAnonymousIndividualsTest.java | 4 +- .../reasoner/vlog/LargeAritiesTest.java | 4 +- .../rulewerk/reasoner/vlog}/LoggingTest.java | 27 +-- .../vlog}/ModelToVLogConverterTest.java | 4 +- .../reasoner/vlog}/QueryAnswerCountTest.java | 4 +- .../vlog}/QueryAnsweringCorrectnessTest.java | 5 +- .../reasoner/vlog}/QueryResultsUtils.java | 4 +- .../reasoner/vlog}/RdfFileDataSourceTest.java | 26 +-- .../reasoner/vlog}/ReasonerTimeoutTest.java | 18 +- .../SparqlQueryResultDataSourceTest.java | 28 +-- .../reasoner/vlog/StratifiedNegationTest.java | 11 +- .../vlog/VLogDataFromCsvFileTest.java | 5 +- .../reasoner/vlog/VLogDataFromMemoryTest.java | 4 +- .../vlog/VLogDataFromRdfFileTest.java | 5 +- ...LogDataSourceConfigurationVisitorTest.java | 105 +++++++++ .../reasoner/vlog/VLogExpressions.java | 4 +- .../reasoner/vlog/VLogQueryResultUtils.java | 4 +- .../reasoner/vlog/VLogQueryTest.java | 4 +- .../reasoner/vlog}/VLogReasonerBasics.java | 8 +- .../vlog}/VLogReasonerCombinedInputs.java | 9 +- .../reasoner/vlog}/VLogReasonerCsvInput.java | 10 +- .../reasoner/vlog}/VLogReasonerCsvOutput.java | 4 +- .../reasoner/vlog}/VLogReasonerNegation.java | 8 +- .../reasoner/vlog}/VLogReasonerRdfInput.java | 10 +- .../vlog}/VLogReasonerSparqlInput.java | 9 +- .../reasoner/vlog}/VLogReasonerStateTest.java | 21 +- .../VLogReasonerWriteInferencesTest.java | 43 ++-- .../reasoner/vlog/VLogTermNamesTest.java | 4 +- .../vlog}/VLogToModelConverterTest.java | 8 +- 137 files changed, 951 insertions(+), 494 
deletions(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java rename rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/{VLogDataSource.java => ReasonerDataSource.java} (61%) create mode 100644 rulewerk-vlog/LICENSE.txt create mode 100644 rulewerk-vlog/pom.xml rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/ModelToVLogConverter.java (98%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/TermToVLogConverter.java (96%) create mode 100644 rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogKnowledgeBase.java (95%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogQueryResultIterator.java (95%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasoner.java (98%) rename {rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogToModelConverter.java (95%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/binaryFacts.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/constantD.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/empty.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/invalidFormatNtFacts.nt (100%) rename 
{rulewerk-core => rulewerk-vlog}/src/test/data/input/ternaryFacts.nt (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/ternaryFactsZipped.nt.gz (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/unaryFacts.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/unaryFactsCD.csv (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/input/unaryFactsZipped.csv.gz (100%) rename {rulewerk-core => rulewerk-vlog}/src/test/data/output/.keep (100%) create mode 100644 rulewerk-vlog/src/test/data/output/binaryFacts.csv create mode 100644 rulewerk-vlog/src/test/data/output/exclude_blanks.csv create mode 100644 rulewerk-vlog/src/test/data/output/include_blanks.csv create mode 100644 rulewerk-vlog/src/test/data/output/unaryFacts.csv rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/AddDataSourceTest.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/AnswerQueryTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/CsvFileDataSourceTest.java (76%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/ExportQueryResultToCsvFileTest.java (93%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/FileDataSourceTestUtils.java (96%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/GeneratedAnonymousIndividualsTest.java (98%) rename 
{rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/LargeAritiesTest.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/LoggingTest.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/ModelToVLogConverterTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/QueryAnswerCountTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/QueryAnsweringCorrectnessTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/QueryResultsUtils.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/RdfFileDataSourceTest.java (70%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/ReasonerTimeoutTest.java (94%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/SparqlQueryResultDataSourceTest.java (58%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/StratifiedNegationTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => 
rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogDataFromCsvFileTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogDataFromMemoryTest.java (99%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogDataFromRdfFileTest.java (96%) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogExpressions.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogQueryResultUtils.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogQueryTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerBasics.java (93%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerCombinedInputs.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerCsvInput.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerCsvOutput.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => 
rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerNegation.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerRdfInput.java (95%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerSparqlInput.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerStateTest.java (96%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogReasonerWriteInferencesTest.java (97%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk}/reasoner/vlog/VLogTermNamesTest.java (98%) rename {rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation => rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog}/VLogToModelConverterTest.java (97%) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 24f38e9e0..d0408e336 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -11,6 +11,11 @@ Breaking changes: `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` * The `FileDataSource` constructor and those of derived classes now take the path to a file instead of `File` object. +* The VLog backend has been moved to a new `rulewerk-vlog` module, + changing several import paths. `Reasoner.getInstance()` now takes a + mandatory argument, a function taking a `KnowledgeBase` and + returning a `Reasoner` instance. Previous behaviour can be obtained + by using `Reasoner.getInstance(VLogReasoner::new)`. 
New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` diff --git a/coverage/pom.xml b/coverage/pom.xml index 7b75d4ebd..c91db4c28 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -14,36 +14,41 @@ coverage - - org.semanticweb.rulewerk - rulewerk-core - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-rdf - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-owlapi - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-graal - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-parser - 0.6.0-SNAPSHOT - - - org.semanticweb.rulewerk - rulewerk-client - 0.6.0-SNAPSHOT - + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-graal + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-client + ${project.version} + diff --git a/pom.xml b/pom.xml index 944c4f5fb..bbe69eb09 100644 --- a/pom.xml +++ b/pom.xml @@ -18,6 +18,7 @@ rulewerk-core + rulewerk-vlog rulewerk-rdf rulewerk-examples rulewerk-owlapi diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index cc051d591..a3760ec8b 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -31,6 +31,11 @@ rulewerk-parser ${project.version} + + ${project.groupId} + rulewerk-vlog + ${project.version} + org.slf4j slf4j-log4j12 diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index aca4cd136..4b7d94e48 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -9,9 +9,9 @@ * 
Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index a36f0d85d..de472be40 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java index 54de47a9b..ee48b9beb 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java index 91a4d0cda..ba3d2ccdc 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -31,7 +31,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; import picocli.CommandLine.ArgGroup; diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index bcd23d052..03f98eff1 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index 9ca9bd4fb..1f84bb15a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 35b7e5ea4..36a019c2c 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -14,52 +14,5 @@ jar Rulewerk Core Components - Core components of Rulewerk: reasoner and model - - - 1.3.3-snapshot - - - - - - ${project.groupId} - vlog-base - ${karmaresearch.vlog.version} - - - - - - development - - - - - - org.apache.maven.plugins - maven-install-plugin - 2.4 - - - initialize - - install-file - - - ${project.groupId} - vlog-base - ${karmaresearch.vlog.version} - jar - ./lib/jvlog-local.jar - - - - - - - - + Core components of Rulewerk: reasoner interface and model diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index 28e22ce99..57f505120 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index eacafd6de..0e6515403 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index af961ffda..d87ef7fcd 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java index ea0eaca0e..5223c04bc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java index 9ff8dca3a..93237f788 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 2ec1c15bc..cde555c76 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index d245da52a..4b3669226 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 892f2d33b..2bb4e72a7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index d1b62815a..1d465c4b1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 11e42fb88..c9104a280 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index cf7b69212..d4efc8496 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index e95d5cfaa..dc0892e78 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 04741fb8e..da4bff697 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index b4c6d07cd..281c16cc6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 6cadfa1af..7765364c6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 5ea9ee0b1..e7a0245c2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,6 +24,8 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.function.Function; +import java.util.function.Supplier; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; @@ -35,10 +37,9 @@ import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; /** - * Interface that exposes the existential rule reasoning capabilities of VLog. + * Interface that exposes the (existential) rule reasoning capabilities of a Reasoner. *
* The knowledge base of the reasoner can be loaded with explicit facts * and existential rules that would infer implicit facts trough @@ -76,11 +77,15 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** * Factory method that to instantiate a Reasoner with an empty knowledge base. * - * @return a {@link VLogReasoner} instance. + * @return a {@link Reasoner} instance. */ - static Reasoner getInstance() { - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - return new VLogReasoner(knowledgeBase); + static Reasoner getInstance(Function makeReasoner) { + return getInstance(makeReasoner, KnowledgeBase::new); + } + + static Reasoner getInstance(Function makeReasoner, Supplier makeKnowledgeBase) { + final KnowledgeBase knowledgeBase = makeKnowledgeBase.get(); + return makeReasoner.apply(knowledgeBase); } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index cb1dea326..3ee0a4574 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -77,4 +77,8 @@ public String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java new file mode 100644 index 000000000..82b3d11de --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -0,0 +1,31 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public interface DataSourceConfigurationVisitor { + public void visit(CsvFileDataSource dataSource); + + public void visit(RdfFileDataSource dataSource); + + public void visit(SparqlQueryResultDataSource dataSource); + + public void visit(InMemoryDataSource dataSource); +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java index 449a9dbe9..3c3df5d9b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -26,7 +26,7 @@ /** * Iterator that represents an empty query result. - * + * * @author Markus Kroetzsch * */ diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index ec902eb00..cbdb10e61 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -38,10 +38,7 @@ * @author Irina Dragoste * */ -public abstract class FileDataSource extends VLogDataSource { - - private final static String DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; - +public abstract class FileDataSource implements ReasonerDataSource { private final File file; private final String filePath; private final String fileName; @@ -87,21 +84,6 @@ private String getValidExtension(final String fileName, final Iterable p return potentialExtension.get(); } - @Override - public final String toConfigString() { - final String configStringPattern = - - PREDICATE_NAME_CONFIG_LINE + - - DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - - "EDB%1$d_param0=" + this.dirCanonicalPath + "\n" + - - "EDB%1$d_param1=" + this.fileNameWithoutExtension + "\n"; - - return configStringPattern; - } - public File getFile() { return this.file; } @@ -119,7 +101,7 @@ public String getName() { * * @return The canonical path to the parent directory where the file resides. */ - String getDirCanonicalPath() { + public String getDirCanonicalPath() { return this.dirCanonicalPath; } @@ -128,7 +110,7 @@ String getDirCanonicalPath() { * * @return the file basename without any extension. 
*/ - String getFileNameWithoutExtension() { + public String getFileNameWithoutExtension() { return this.fileNameWithoutExtension; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index c3033c8a6..72af91a42 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -35,7 +35,7 @@ * @author Markus Kroetzsch * */ -public class InMemoryDataSource implements DataSource { +public class InMemoryDataSource implements ReasonerDataSource { String[][] data; int nextEmptyTuple = 0; @@ -57,6 +57,22 @@ public InMemoryDataSource(final int arity, final int initialCapacity) { this.data = new String[initialCapacity][arity]; } + /** + * Transforms a constant name in a format suitable for the + * reasoner. The default implementation assumes the VLog backend. + * @param constantName the name of the constant + * + * @return a transformed version of constantName that is suitable for the Reasoner. + */ + protected String transformConstantName(String constantName) { + if (!constantName.startsWith("\"") && constantName.contains(":")) { + // enclose IRIs with brackets + return "<" + constantName + ">"; + } + // it's either a datatype literal, or a relative IRI, leave it unchanged + return constantName; + } + /** * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. @@ -74,7 +90,7 @@ public void addTuple(final String... 
constantNames) { } this.data[this.nextEmptyTuple] = new String[this.arity]; for (int i = 0; i < this.arity; i++) { - this.data[this.nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); + this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); } this.nextEmptyTuple++; } @@ -105,4 +121,9 @@ public String getSyntacticRepresentation() { } return sb.toString(); } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java index edd6b44ca..c433758ee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -1,8 +1,5 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; -import org.semanticweb.rulewerk.core.reasoner.Correctness; -import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; - /*- * #%L * Rulewerk Core Components @@ -23,6 +20,9 @@ * #L% */ +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; + public class QueryAnswerCountImpl implements QueryAnswerCount { final private Correctness correctness; @@ -30,14 +30,14 @@ public class QueryAnswerCountImpl implements QueryAnswerCount { /** * Constructor of QueryAnswerSize - * + * * @param correctness of the evaluated query. See {@link Correctness}. - * + * * @param size number of query answers, i.e. number of facts in the * extension of the query. 
*/ - QueryAnswerCountImpl(Correctness correctness, long size) { + public QueryAnswerCountImpl(Correctness correctness, long size) { this.correctness = correctness; this.count = size; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index b143f7b4f..7d8a06f24 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -26,15 +26,15 @@ import org.semanticweb.rulewerk.core.model.api.Term; /** - * Implements {@link QueryResult}s. + * Implements {@link QueryResult}s. * @author Irina Dragoste * */ -final class QueryResultImpl implements QueryResult { +public final class QueryResultImpl implements QueryResult { private final List terms; - QueryResultImpl(List terms) { + public QueryResultImpl(List terms) { this.terms = terms; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 265db485c..29a3f327f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -80,4 +80,9 @@ public String getSyntacticRepresentation() { public Optional getRequiredArity() { return Optional.of(3); } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java similarity index 61% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java index 68eb58133..57c6e1dee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java @@ -23,22 +23,14 @@ import org.semanticweb.rulewerk.core.model.api.DataSource; /** - * Abstract base class for VLog-specific data sources. - * - * @author Markus Kroetzsch - * + * An interface for DataSources that can be used with a Reasoner. */ -public abstract class VLogDataSource implements DataSource { - - public static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; - public static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; - +public interface ReasonerDataSource extends DataSource { /** - * Constructs a String representation of the data source. + * Accept a {@link DataSourceConfigurationVisitor} to configure a + * reasoner to load this data source. 
* - * @return a String representation of the data source configuration for a - * certain predicate. + * @param visitor the visitor. */ - public abstract String toConfigString(); - + public void accept(DataSourceConfigurationVisitor visitor); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index f60081c15..b0bc00877 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 0015bece5..cdc8723d2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -36,9 +36,9 @@ * @author Irina Dragoste * */ -public class SparqlQueryResultDataSource extends VLogDataSource { +public class SparqlQueryResultDataSource implements ReasonerDataSource { + - private static final String DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; private final URL endpoint; private final String queryVariables; @@ -108,20 +108,20 @@ public String getQueryVariables() { return this.queryVariables; } - @Override - public final String toConfigString() { - final String configStringPattern = + // @Override + // public final String toConfigString() { + // final String configStringPattern = - PREDICATE_NAME_CONFIG_LINE + + // PREDICATE_NAME_CONFIG_LINE + - DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + + // DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + + // "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + - "EDB%1$d_param2=" + this.queryBody + "\n"; + // "EDB%1$d_param2=" + this.queryBody + "\n"; - return configStringPattern; - } + // return configStringPattern; + // } static String getQueryVariablesList(LinkedHashSet queryVariables) { final StringBuilder sb = new StringBuilder(); @@ -177,4 +177,9 @@ public String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } + } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 310c24715..dfd658443 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 9f06ee6d2..812385669 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 15c633f86..d989d1a9f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java index 724a41064..38d57c214 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java @@ -32,7 +32,6 @@ import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; public class QueryResultImplTest { diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 216564049..84344dd75 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -41,6 +41,11 @@ ${project.groupId} rulewerk-parser ${project.version} +
+ + ${project.groupId} + rulewerk-vlog + ${project.version} org.slf4j diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index 85f9e8b3b..d3249c93d 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -24,7 +24,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java index b44afdbf8..7d9cbb509 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java @@ -25,7 +25,7 @@ import java.io.IOException; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java index b80163fb4..f903a5207 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java +++ 
b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java @@ -28,7 +28,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index 5149abc41..a0801f415 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 0647c1edc..591261fb1 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -27,7 +27,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index 28e870fa6..3f0f1ab6e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -24,7 +24,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java index 0b76c4bb3..4aea67362 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java @@ -26,7 +26,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java index 2aa2c02a2..50770072d 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java @@ -27,7 +27,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; 
-import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index 8eb7a2a8f..2f74a96fa 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -39,6 +39,7 @@ import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; /** @@ -46,7 +47,7 @@ * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In * this example, we will query Wikidata for titles of publications that have * authors who have children together. 
- * + * * @author Irina Dragoste * */ @@ -120,7 +121,7 @@ public static void main(final String[] args) throws IOException { */ final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index 81039cf41..902fbf543 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -25,6 +25,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -65,7 +66,7 @@ public class ConfigureReasonerLogging { public static void main(final String[] args) throws IOException, ParsingException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* exists z. B(?y, !z) :- A(?x, ?y) . */ kb.addStatements(RuleParser.parseRule("B(?Y, !Z) :- A(?X, ?Y) .")); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java index ae4042817..84b1291af 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java @@ -25,7 +25,7 @@ import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.reasoner.Algorithm; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java 
b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 2dc50ca84..1f9621bd8 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,6 +29,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; @@ -101,7 +102,7 @@ public static void main(final String[] args) throws IOException { * the reasoner automatically. 
*/ - try (Reasoner reasoner = Reasoner.getInstance()) { + try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index b4f05470f..b5c39441e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -27,7 +27,7 @@ import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 971fedb8b..9bfac8c74 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,7 +35,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.DoidExample; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index 5bcb6bea0..52815942e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -38,7 +38,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index 77af29d19..79298dd2c 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -45,7 +45,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index a99563a3f..836fa51e0 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java index 5ab82d428..94fa0cd7c 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java index b6c3cd88b..fb8fcc40f 100644 --- a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 80390026e..b4ddbc3ee 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index 06f02adca..dcae928b1 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 802161334..f5c737dc2 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index 70df4a5b6..87da11c8d 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -14,7 +14,6 @@ rulewerk-parser Rulewerk Parser - http://maven.apache.org UTF-8 diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index 5b8fddbdd..ff356ba3f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 146faf39c..ae6c25251 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index c61ec77c3..faa15a7db 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 41a551a75..f6bc3b9d3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java index 802cafe03..f5890b9e8 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 7979f154f..67a66c9c4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index ee7a2ec79..259c26759 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 9c24f2ab1..ae227a9d1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 62b5e246f..1ef7a4372 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index a7878d797..54126cd0f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 14c2bceb3..e72ae9dfb 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index e8cb4b566..efed746b6 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -2,9 +2,9 @@ - + 4.0.0 - + org.semanticweb.rulewerk rulewerk-parent @@ -23,6 +23,12 @@ rulewerk-core ${project.version} + + ${project.groupId} + rulewerk-vlog + ${project.version} + test + org.openrdf.sesame diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index ef732de3f..058228665 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index 6297a8968..214d32d6e 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -44,7 +44,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class TestReasonOverRdfFacts { diff --git a/rulewerk-vlog/LICENSE.txt b/rulewerk-vlog/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-vlog/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml new file mode 100644 index 000000000..7aa17b881 --- /dev/null +++ b/rulewerk-vlog/pom.xml @@ -0,0 +1,70 @@ + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-vlog + jar + + Rulewerk VLog Reasoner Support + and model + + + 1.3.3-snapshot + + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + + ${project.groupId} + vlog-base + ${karmaresearch.vlog.version} + + + + + + development + + + + + + org.apache.maven.plugins + maven-install-plugin + 2.4 + + + initialize + + install-file + + + ${project.groupId} + vlog-base + ${karmaresearch.vlog.version} + jar + ./lib/jvlog-local.jar + + + + + + + + + diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java similarity index 98% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java index ae7f59597..3fe63160a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ 
b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -39,7 +39,7 @@ * Utility class with static methods for converting from VLog API model objects * ({@code org.semanticweb.rulewerk.core.model}) to internal VLog model objects * ({@code karmaresearch.vlog}). - * + * * @author Irina Dragoste * */ @@ -97,7 +97,7 @@ static String[] toVLogFactTuple(final Fact fact) { /** * Internal String representation that uniquely identifies a {@link Predicate}. - * + * * @param predicate a {@link Predicate} * @return String representation corresponding to given predicate name and * arity. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java similarity index 96% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 64bc83db3..90496ff76 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -30,6 +30,7 @@ import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; /** * A visitor that converts {@link Term}s of different types to corresponding diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java new file mode 100644 index 000000000..dd4ac05f9 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -0,0 +1,73 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; + +public class VLogDataSourceConfigurationVisitor implements DataSourceConfigurationVisitor { + private String configString = null; + + private static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; + private static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; + private final static String FILE_DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; + private static final String SPARQL_DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; + + public String getConfigString() { + return configString; + } + + protected void setFileConfigString(FileDataSource dataSource) { + this.configString = + PREDICATE_NAME_CONFIG_LINE + + DATASOURCE_TYPE_CONFIG_PARAM + "=" + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + + "EDB%1$d_param0=" + dataSource.getDirCanonicalPath() + "\n" + + "EDB%1$d_param1=" + dataSource.getFileNameWithoutExtension() + "\n"; + } + + @Override + public void visit(CsvFileDataSource dataSource) { + setFileConfigString(dataSource); + } + + @Override + public void visit(RdfFileDataSource dataSource) { + setFileConfigString(dataSource); + } + + @Override + public void visit(SparqlQueryResultDataSource dataSource) { + this.configString = + PREDICATE_NAME_CONFIG_LINE + + DATASOURCE_TYPE_CONFIG_PARAM + "=" + SPARQL_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + + "EDB%1$d_param0=" + dataSource.getEndpoint() + "\n" + "EDB%1$d_param1=" + dataSource.getQueryVariables() + "\n" + + "EDB%1$d_param2=" + dataSource.getQueryBody() + "\n"; + } + + @Override + 
public void visit(InMemoryDataSource dataSource) { + this.configString = null; + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java similarity index 95% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java index a7e7da9aa..3bd57a52f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -46,6 +46,7 @@ import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource; /** * Class for organizing a Knowledge Base using vLog-specific data structures. 
@@ -120,9 +121,11 @@ int addDataSourceConfigurationString(final DataSource dataSource, final Predicat int newDataSourceIndex = dataSourceIndex; if (dataSource != null) { - if (dataSource instanceof VLogDataSource) { - final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; - final String configString = vLogDataSource.toConfigString(); + if (dataSource instanceof ReasonerDataSource) { + final ReasonerDataSource reasonerDataSource = (ReasonerDataSource) dataSource; + final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + reasonerDataSource.accept(visitor); + final String configString = visitor.getConfigString(); if (configString != null) { formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); newDataSourceIndex++; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java similarity index 95% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java index 1db2cc922..35dc7f75c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -30,7 +30,7 @@ /** * Iterates trough all answers to a query. An answer to a query is a * {@link QueryResult}. Each query answer is distinct. 
- * + * * @author Irina Dragoste * */ diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java similarity index 98% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 00b774ef0..c487cc102 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -55,6 +55,9 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.ReasonerState; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.reasoner.implementation.EmptyQueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java similarity index 95% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index d90331ccc..581e13368 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -31,12 +31,14 @@ import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** * Utility class with static methods for converting from VLog internal model * ({@code 
karmaresearch.vlog} objects) to VLog API model * ({@code org.semanticweb.rulewerk.core.model.api}) objects. - * + * * @author Irina Dragoste * */ @@ -45,7 +47,7 @@ class VLogToModelConverter { /** * Converts internal VLog query results (represented as arrays of * {@link karmaresearch.vlog.Term}s) into VLog model API QueryResults. - * + * * @param vLogQueryResult an array of terms that represent an answer to a query. * @return a QueryResult containing the corresponding {@code vLogQueryResult} as * a List of {@link Term}s. @@ -57,7 +59,7 @@ static QueryResult toQueryResult(karmaresearch.vlog.Term[] vLogQueryResult) { /** * Converts an array of internal VLog terms ({@link karmaresearch.vlog.Term}) * into the corresponding list of VLog API model {@link Term}. - * + * * @param vLogTerms input terms array, to be converted to a list of * corresponding {@link Term}s. * @return list of {@link Term}s, where each element corresponds to the element @@ -74,7 +76,7 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { /** * Converts an internal VLog term ({@link karmaresearch.vlog.Term}) to a VLog * API model {@link Term} of the same type and name. - * + * * @param vLogTerm term to be converted * @return a ({@link karmaresearch.vlog.Term}) with the same name as given * {@code vLogTerm} and of the corresponding type. @@ -96,7 +98,7 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { /** * Creates a {@link Constant} from the given VLog constant name. 
- * + * * @param vLogConstantName the string name used by VLog * @return {@link Constant} object */ diff --git a/rulewerk-core/src/test/data/input/binaryFacts.csv b/rulewerk-vlog/src/test/data/input/binaryFacts.csv similarity index 100% rename from rulewerk-core/src/test/data/input/binaryFacts.csv rename to rulewerk-vlog/src/test/data/input/binaryFacts.csv diff --git a/rulewerk-core/src/test/data/input/constantD.csv b/rulewerk-vlog/src/test/data/input/constantD.csv similarity index 100% rename from rulewerk-core/src/test/data/input/constantD.csv rename to rulewerk-vlog/src/test/data/input/constantD.csv diff --git a/rulewerk-core/src/test/data/input/empty.csv b/rulewerk-vlog/src/test/data/input/empty.csv similarity index 100% rename from rulewerk-core/src/test/data/input/empty.csv rename to rulewerk-vlog/src/test/data/input/empty.csv diff --git a/rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt b/rulewerk-vlog/src/test/data/input/invalidFormatNtFacts.nt similarity index 100% rename from rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt rename to rulewerk-vlog/src/test/data/input/invalidFormatNtFacts.nt diff --git a/rulewerk-core/src/test/data/input/ternaryFacts.nt b/rulewerk-vlog/src/test/data/input/ternaryFacts.nt similarity index 100% rename from rulewerk-core/src/test/data/input/ternaryFacts.nt rename to rulewerk-vlog/src/test/data/input/ternaryFacts.nt diff --git a/rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz b/rulewerk-vlog/src/test/data/input/ternaryFactsZipped.nt.gz similarity index 100% rename from rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz rename to rulewerk-vlog/src/test/data/input/ternaryFactsZipped.nt.gz diff --git a/rulewerk-core/src/test/data/input/unaryFacts.csv b/rulewerk-vlog/src/test/data/input/unaryFacts.csv similarity index 100% rename from rulewerk-core/src/test/data/input/unaryFacts.csv rename to rulewerk-vlog/src/test/data/input/unaryFacts.csv diff --git 
a/rulewerk-core/src/test/data/input/unaryFactsCD.csv b/rulewerk-vlog/src/test/data/input/unaryFactsCD.csv similarity index 100% rename from rulewerk-core/src/test/data/input/unaryFactsCD.csv rename to rulewerk-vlog/src/test/data/input/unaryFactsCD.csv diff --git a/rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz b/rulewerk-vlog/src/test/data/input/unaryFactsZipped.csv.gz similarity index 100% rename from rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz rename to rulewerk-vlog/src/test/data/input/unaryFactsZipped.csv.gz diff --git a/rulewerk-core/src/test/data/output/.keep b/rulewerk-vlog/src/test/data/output/.keep similarity index 100% rename from rulewerk-core/src/test/data/output/.keep rename to rulewerk-vlog/src/test/data/output/.keep diff --git a/rulewerk-vlog/src/test/data/output/binaryFacts.csv b/rulewerk-vlog/src/test/data/output/binaryFacts.csv new file mode 100644 index 000000000..bcaabc2bc --- /dev/null +++ b/rulewerk-vlog/src/test/data/output/binaryFacts.csv @@ -0,0 +1,2 @@ +c1,c2 +c3,c4 diff --git a/rulewerk-vlog/src/test/data/output/exclude_blanks.csv b/rulewerk-vlog/src/test/data/output/exclude_blanks.csv new file mode 100644 index 000000000..e69de29bb diff --git a/rulewerk-vlog/src/test/data/output/include_blanks.csv b/rulewerk-vlog/src/test/data/output/include_blanks.csv new file mode 100644 index 000000000..e502cf529 --- /dev/null +++ b/rulewerk-vlog/src/test/data/output/include_blanks.csv @@ -0,0 +1,2 @@ +c,1_2_0 +c,1_3_0 diff --git a/rulewerk-vlog/src/test/data/output/unaryFacts.csv b/rulewerk-vlog/src/test/data/output/unaryFacts.csv new file mode 100644 index 000000000..d0aaf976a --- /dev/null +++ b/rulewerk-vlog/src/test/data/output/unaryFacts.csv @@ -0,0 +1,2 @@ +c1 +c2 diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java similarity index 98% rename from 
rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java index 2739ae08c..ed1aa4f23 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java @@ -1,20 +1,20 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -41,6 +41,7 @@ import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class AddDataSourceTest { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java index 9de6276f3..30d844c3c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -7,7 +7,7 @@ /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java similarity index 76% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java index e94173d20..d85f73d09 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java +++ 
b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,9 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; + +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class CsvFileDataSourceTest { @@ -54,19 +56,6 @@ public void testConstructor() throws IOException { FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName(), dirCanonicalPath, "file"); } - @Test - public void testToConfigString() throws IOException { - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); - final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); - - final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); - final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" - + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; - - assertEquals(expectedConfigString, unzippedCsvFileDataSource.toConfigString()); - assertEquals(expectedConfigString, 
zippedCsvFileDataSource.toConfigString()); - } - @Test public void testNoParentDir() throws IOException { final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java similarity index 93% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java index 89ad3228d..3619ce9c8 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -27,7 +27,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java similarity index 96% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java index dc027e2a0..6a0819ed3 100644 --- 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java @@ -1,11 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -23,6 +20,9 @@ * #L% */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + import java.io.FileReader; import java.io.IOException; import java.io.Reader; @@ -38,6 +38,7 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; /** * Utility class for reading from and writing to data source files. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java index 92512fdd6..87f5eb910 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java index 1ee33c9eb..5497489b4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java @@ -1,10 +1,10 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertArrayEquals; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * 
%% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java index 269cb56cc..598ea90c9 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java @@ -1,17 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -29,6 +20,14 @@ * #L% */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; import java.io.IOException; import java.util.Arrays; @@ -40,7 +39,9 @@ import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; public class LoggingTest { @@ -108,7 +109,7 @@ public void testSetLogFileInexistent() 
throws IOException { @Test(expected = NullPointerException.class) public void testSetLogLevelNull() { - try (final Reasoner instance = Reasoner.getInstance()) { + try (final Reasoner instance = Reasoner.getInstance(VLogReasoner::new)) { instance.setLogLevel(null); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java index b88e5e3ef..653cb5401 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java index 2e7e0c29f..7b2d519f6 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java @@ -1,8 +1,8 @@ -package 
org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java index ffbada38f..d9cc90601 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -46,6 +46,7 @@ import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class QueryAnsweringCorrectnessTest { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java rename to 
rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java index 81ee7716e..32af07014 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java similarity index 70% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java index 70e8b4657..f16ab06f2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java @@ -1,19 +1,19 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,9 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; + +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; public class RdfFileDataSourceTest { @@ -53,18 +55,4 @@ public void testConstructor() throws IOException { FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName(), dirCanonicalPath, "file"); FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName(), dirCanonicalPath, "file"); } - - @Test - public void testToConfigString() throws IOException { - final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); - final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); - - final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); - final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" - + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; - - assertEquals(expectedConfigString, unzippedRdfFileDataSource.toConfigString()); - assertEquals(expectedConfigString, zippedRdfFileDataSource.toConfigString()); - } - } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java similarity index 94% rename from 
rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java index b8e28c53d..415e03ccf 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java @@ -1,11 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -23,6 +20,9 @@ * #L% */ +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -38,7 +38,9 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** @@ -46,7 +48,7 @@ * expected and terminates reasoning after the given {@link #timeout}. Results * are accepted within one second to account for setup and tear down of * reasoning resources. 
- * + * * @author Adrian Bielefeldt * */ @@ -117,7 +119,7 @@ public void setUp() { @Test(expected = IllegalArgumentException.class) public void testSetReasoningTimeout() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { reasoner.setReasoningTimeout(-3); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java similarity index 58% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java index 9e9806113..81f6aaa92 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -32,6 +32,8 @@ import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class SparqlQueryResultDataSourceTest { @@ -40,28 +42,6 @@ public class SparqlQueryResultDataSourceTest { public SparqlQueryResultDataSourceTest() throws MalformedURLException { } - @Test - public void testToStringSimpleConstructor() throws MalformedURLException { - final 
SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, "b,a", - "?a wdt:P22 ?b"); - final String expectedStringConfig = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n" - + "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" - + "EDB%1$d_param2=?a wdt:P22 ?b\n"; - assertEquals(expectedStringConfig, dataSource.toConfigString()); - } - - @Test - public void testToStringList() throws MalformedURLException { - final LinkedHashSet queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, - "?a wdt:P22 ?b"); - final String expectedStringConfig = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n" - + "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" - + "EDB%1$d_param2=?a wdt:P22 ?b\n"; - assertEquals(expectedStringConfig, dataSource.toConfigString()); - } - @Test(expected = IllegalArgumentException.class) public void testEmptyQueryBodyList() throws IOException { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java index ee9041fda..dcd9243f4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk 
VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -25,7 +25,6 @@ import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.vlog.VLogExpressions; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; @@ -43,7 +42,7 @@ public class StratifiedNegationTest { /** * P(x), Not(Q(x)) -> R(x) Q - EDB. - * + * * @throws EDBConfigurationException * @throws NotStartedException */ @@ -103,7 +102,7 @@ public void testSimpleInputNegation() /** * P(x), Not(Q(x)) -> R(x)
* R-IDB. - * + * * @throws EDBConfigurationException * @throws NotStartedException */ @@ -152,7 +151,7 @@ public void testStratifiedNegationOnIDB() /** * P(x), Not(Q(x)) -> Q(x)
* Q - IDB. - * + * * @throws EDBConfigurationException * @throws NotStartedException */ diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java index afc81d080..7eae82b26 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -28,7 +28,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java index fb2882349..080030601 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java @@ -1,8 +1,8 @@ -package 
org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java similarity index 96% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java index a16b34c89..dd15fcca2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -28,7 +28,6 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java new file mode 100644 index 000000000..b4b860f58 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java @@ -0,0 +1,105 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * 
%% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Arrays; +import java.util.LinkedHashSet; + +import org.junit.Test; + +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class VLogDataSourceConfigurationVisitorTest { + private final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; + private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; + private final String zippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz"; + private final String gzFile = csvFile + ".gz"; + final URL endpoint = new URL("http://query.wikidata.org/sparql"); + + public VLogDataSourceConfigurationVisitorTest() throws MalformedURLException { + } + + @Test + public 
void visit_CsvFileDataSource_succeeds() throws IOException { + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); + final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); + + final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); + final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" + + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; + + assertEquals(expectedConfigString, toConfigString(unzippedCsvFileDataSource)); + assertEquals(expectedConfigString, toConfigString(zippedCsvFileDataSource)); + } + + @Test + public void visit_RdfFileDataSource_succeeds() throws IOException { + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); + + final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); + final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" + + expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n"; + + assertEquals(expectedConfigString, toConfigString(unzippedRdfFileDataSource)); + assertEquals(expectedConfigString, toConfigString(zippedRdfFileDataSource)); + } + + @Test + public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLException { + final SparqlQueryResultDataSource simpleDataSource = new SparqlQueryResultDataSource(endpoint, "b,a", + "?a wdt:P22 ?b"); + final LinkedHashSet queryVariables = new LinkedHashSet<>( + Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); + final SparqlQueryResultDataSource listDataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, + "?a wdt:P22 ?b"); + final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + 
"EDB%1$d_type=SPARQL\n" + + "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" + + "EDB%1$d_param2=?a wdt:P22 ?b\n"; + assertEquals(expectedConfigString, toConfigString(simpleDataSource)); + assertEquals(expectedConfigString, toConfigString(listDataSource)); + } + + @Test + public void visit_InMemoryDataSource_returnsNull() { + final InMemoryDataSource inMemoryDataSource = new InMemoryDataSource(1, 1); + assertEquals(null, toConfigString(inMemoryDataSource)); + } + + private String toConfigString(ReasonerDataSource dataSource) { + VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + dataSource.accept(visitor); + return visitor.getConfigString(); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java index d87a36190..0aee3638f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java rename to 
rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java index a23dae441..d192add94 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java index 57ca22e3f..af7133bca 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java similarity index 93% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java index 0c5ecf2e2..ca6a3bed4 100644 --- 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java @@ -1,11 +1,11 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; /* * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% @@ -59,14 +59,14 @@ public class VLogReasonerBasics { @Test(expected = NullPointerException.class) public void testSetAlgorithmNull() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { reasoner.setAlgorithm(null); } } @Test(expected = NullPointerException.class) public void setRuleRewriteStrategy1() { - try (final Reasoner reasoner = Reasoner.getInstance();) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { reasoner.setRuleRewriteStrategy(null); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java index b3f2fba74..17f5eac18 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java @@ -1,19 +1,19 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package 
org.semanticweb.rulewerk.reasoner.vlog; import static org.junit.Assert.assertEquals; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -41,6 +41,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class VLogReasonerCombinedInputs { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java index 3ec10b94f..b5b365174 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the 
Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -42,6 +42,8 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class VLogReasonerCsvInput { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java index 7585f47bf..45981bb6c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java 
b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java index 39f802725..215590006 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java similarity index 95% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java index f9b52ad44..c64e829ea 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -39,6 +39,8 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; public class VLogReasonerRdfInput { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java index 238d488a1..7fd6c34ad 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -39,6 +39,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class VLogReasonerSparqlInput { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java similarity index 96% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java index 91c61c680..17f3dc8cc 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -49,6 +49,7 @@ import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; public class VLogReasonerStateTest { @@ -67,21 +68,21 @@ public class VLogReasonerStateTest { @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); } } @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.reason(); reasoner.resetReasoner(); reasoner.answerQuery(exampleQueryAtom, true); @@ -90,7 +91,7 @@ public void testFailAnswerQueryAfterReset() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.reason(); reasoner.resetReasoner(); reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); @@ -171,7 +172,7 @@ public void testAddFacts2() throws IOException { @Test public void testResetBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = 
Reasoner.getInstance(VLogReasoner::new)) { reasoner.resetReasoner(); } } @@ -279,7 +280,7 @@ public void testResetEmptyKnowledgeBase() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { + try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 99c6a68f4..5eb58262e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -1,4 +1,24 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ import static org.junit.Assert.*; import static org.mockito.Mockito.*; @@ -28,26 +48,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class VLogReasonerWriteInferencesTest { private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java index ed85ea768..2ff298e9d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java @@ -1,8 +1,8 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java index 0f67a6fa5..2e2db1b5c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java @@ -1,17 +1,17 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; 
/*- * #%L - * Rulewerk Core Components + * Rulewerk VLog Reasoner Support * %% * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. From dcc5f033616ca1cdb26078d0b36a0b209f70995e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 19:53:19 +0200 Subject: [PATCH 0855/1255] Core: Generalise writeInferences to forEachInference --- .../rulewerk/core/reasoner/Reasoner.java | 53 +++++++++++++++++-- .../rulewerk/reasoner/vlog/VLogReasoner.java | 7 +-- 2 files changed, 51 insertions(+), 9 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index e7a0245c2..1cf1f95fa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -24,6 +24,8 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.List; +import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; @@ -33,14 +35,17 @@ import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Rule; +import 
org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** - * Interface that exposes the (existential) rule reasoning capabilities of a Reasoner. - *
+ * Interface that exposes the (existential) rule reasoning capabilities of a + * Reasoner.
* The knowledge base of the reasoner can be loaded with explicit facts * and existential rules that would infer implicit facts trough * reasoning.
@@ -77,13 +82,27 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** * Factory method that to instantiate a Reasoner with an empty knowledge base. * + * @param makeReasoner a function that creates a Reasoner instances given a + * {@link KnowledgeBase}. + * * @return a {@link Reasoner} instance. */ static Reasoner getInstance(Function makeReasoner) { return getInstance(makeReasoner, KnowledgeBase::new); } - static Reasoner getInstance(Function makeReasoner, Supplier makeKnowledgeBase) { + /** + * Factory method that to instantiate a Reasoner with an empty knowledge base. + * + * @param makeReasoner a function that creates a Reasoner instances given a + * {@link KnowledgeBase}. + * @param makeKnowledgeBase a function that creates a {@link KnowledgeBase} + * instance. + * + * @return a {@link Reasoner} instance. + */ + static Reasoner getInstance(Function makeReasoner, + Supplier makeKnowledgeBase) { final KnowledgeBase knowledgeBase = makeKnowledgeBase.get(); return makeReasoner.apply(knowledgeBase); } @@ -95,6 +114,27 @@ static Reasoner getInstance(Functi */ KnowledgeBase getKnowledgeBase(); + /** + * Interface for actions to perform on inferences. + * + * Essentially a {@link java.util.function.BiConsumer}, but with a more + * permissive Exception spec. + */ + @FunctionalInterface + public interface InferenceAction { + void accept(Predicate predicate, List termList) throws IOException; + } + + /** + * Performs the given action for each inference. + * + * @param action The action to be performed for each inference. + * @return the correctness of the inferences, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException + */ + Correctness forEachInference(InferenceAction action) throws IOException; + /** * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. 
@@ -104,7 +144,12 @@ static Reasoner getInstance(Functi * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException */ - Correctness writeInferences(OutputStream stream) throws IOException; + default Correctness writeInferences(OutputStream stream) throws IOException { + final KnowledgeBase knowledgeBase = getKnowledgeBase(); + stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); + return forEachInference((predicate, termList) -> stream + .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); + } /** * Exports all the (explicit and implicit) facts inferred during reasoning of diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index c487cc102..9cdee5143 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -458,7 +458,7 @@ private void validateBeforeQuerying(final PositiveLiteral query) { } @Override - public Correctness writeInferences(OutputStream stream) throws IOException { + public Correctness forEachInference(InferenceAction action) throws IOException { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, @@ -466,8 +466,6 @@ public Correctness writeInferences(OutputStream stream) throws IOException { } final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); - stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); - for (final Predicate predicate : toBeQueriedHeadPredicates) { final PositiveLiteral queryAtom = getQueryAtom(predicate); final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); @@ -475,8 +473,7 @@ public Correctness 
writeInferences(OutputStream stream) throws IOException { while (answers.hasNext()) { final karmaresearch.vlog.Term[] vlogTerms = answers.next(); final List termList = VLogToModelConverter.toTermList(vlogTerms); - stream.write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri) - .getBytes()); + action.accept(predicate, termList); } } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); From f96db52c92b6f01051dc19753b65da4cd66638c1 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 20:17:13 +0200 Subject: [PATCH 0856/1255] Add Reasoner#getInferences First steps towards #167, still requires more tests. --- .../rulewerk/core/reasoner/Reasoner.java | 41 +++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 1cf1f95fa..1d0241e96 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -25,10 +25,11 @@ import java.io.IOException; import java.io.OutputStream; import java.util.List; -import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Stream; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; @@ -41,6 +42,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import 
org.semanticweb.rulewerk.core.model.implementation.Serializer; /** @@ -151,9 +153,42 @@ default Correctness writeInferences(OutputStream stream) throws IOException { .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); } + public class CorrectnessAndInferences { + private Correctness correctness; + private Stream inferences; + + CorrectnessAndInferences(Correctness correctness, Stream inferences) { + this.correctness = correctness; + this.inferences = inferences; + } + + public Correctness getCorrectness() { + return this.correctness; + } + + public Stream getInferences() { + return this.inferences; + } + } + + default CorrectnessAndInferences getInferences() { + Stream.Builder builder = Stream.builder(); + Correctness correctness; + try { + correctness = forEachInference( + (predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); + } catch (IOException e) { + // this will never throw. + throw new RulewerkRuntimeException("unexpected IOException", e); + } + + return new CorrectnessAndInferences(correctness, builder.build()); + } + /** - * Exports all the (explicit and implicit) facts inferred during reasoning of - * the knowledge base to a desired file. + * Exports all the (explicit and + * {@link org.omg.PortableServer.IMPLICIT_ACTIVATION_POLICY_ID}) facts inferred + * during reasoning of the knowledge base to a desired file. * * @param filePath a String of the file path for the facts to be written to. 
* @return the correctness of the query answers, depending on the state of the From 8701a75b0322229f0bebdfb4a7061a42c210c0d0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 1 Apr 2020 22:29:45 +0200 Subject: [PATCH 0857/1255] Update paths for vlog-base build script --- build-vlog-library.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index e82a046e0..c1d915137 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -21,6 +21,6 @@ else cd ../../.. fi -mkdir -p rulewerk-core/lib -cp local_builds/jvlog.jar rulewerk-core/lib/jvlog-local.jar +mkdir -p rulewerk-vlog/lib +cp local_builds/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar mvn initialize -Pdevelopment From b41205666af6b7943f6e0b4b4d37a335421c24eb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 15:13:13 +0200 Subject: [PATCH 0858/1255] Core: Rework handling of Correctness in forEachInference --- .../rulewerk/core/reasoner/Reasoner.java | 70 +++++++++++-------- .../rulewerk/reasoner/vlog/VLogReasoner.java | 8 ++- 2 files changed, 45 insertions(+), 33 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 1d0241e96..6eadfc90a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -25,6 +25,7 @@ import java.io.IOException; import java.io.OutputStream; import java.util.List; +import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Stream; @@ -82,7 +83,7 @@ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { /** - * Factory method that to instantiate a Reasoner with an empty knowledge base. + * Factory method to instantiate a Reasoner with an empty knowledge base. * * @param makeReasoner a function that creates a Reasoner instances given a * {@link KnowledgeBase}. @@ -137,6 +138,23 @@ public interface InferenceAction { */ Correctness forEachInference(InferenceAction action) throws IOException; + /** + * Performs the given action for each inference, swallowing + * checked exceptions. + * + * @param action The action to be performed for each inference. + * @return the correctness of the inferences, depending on the + * state of the reasoning (materialisation) and its {@link + * KnowledgeBase}. + */ + default Correctness unsafeForEachInference(BiConsumer> action) { + try { + return forEachInference(action::accept); + } catch (IOException e) { + throw new RulewerkRuntimeException(e); + } + } + /** * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream.
@@ -153,38 +171,28 @@ default Correctness writeInferences(OutputStream stream) throws IOException { .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); } - public class CorrectnessAndInferences { - private Correctness correctness; - private Stream inferences; - - CorrectnessAndInferences(Correctness correctness, Stream inferences) { - this.correctness = correctness; - this.inferences = inferences; - } - - public Correctness getCorrectness() { - return this.correctness; - } - - public Stream getInferences() { - return this.inferences; - } - } - - default CorrectnessAndInferences getInferences() { + /** + * Return a stream of all inferences. + * + * @return a {@link Stream} of {@link Fact} objects corresponding + * to all inferences. + */ + default Stream getInferences() { Stream.Builder builder = Stream.builder(); - Correctness correctness; - try { - correctness = forEachInference( - (predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); - } catch (IOException e) { - // this will never throw. - throw new RulewerkRuntimeException("unexpected IOException", e); - } + unsafeForEachInference((predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); - return new CorrectnessAndInferences(correctness, builder.build()); + return builder.build(); } + /** + * Return the {@link Correctness} status of query answers. + * + * @return the correctness of query answers, depending on the state + * of the reasoning (materialisation) and its {@link + * KnowledgeBase}.
+ */ + Correctness getCorrectness(); + /** * Exports all the (explicit and * implicit) facts inferred diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 9cdee5143..a2a7f1050 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -486,6 +486,10 @@ public Correctness forEachInference(InferenceAction action) throws IOException { return this.correctness; } + public Correctness getCorrectness() { + return this.correctness; + } + private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); From 1a467380d062c508601d7a6d67e53f017bbb615d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 19:19:19 +0200 Subject: [PATCH 0859/1255] Core: Move VLog-specific part of InMemoryDataSource to rulewerk-vlog --- .../rulewerk/core/reasoner/Reasoner.java | 35 +++--- .../implementation/InMemoryDataSource.java | 76 ++----------- .../InMemoryGraphAnalysisExample.java | 15 +-- rulewerk-vlog/pom.xml | 2 +- .../reasoner/vlog/VLogInMemoryDataSource.java | 107 ++++++++++++++++++ .../rulewerk/reasoner/vlog/VLogReasoner.java | 11 +- .../vlog/QueryAnsweringCorrectnessTest.java | 6 +- ...LogDataSourceConfigurationVisitorTest.java | 6 +- .../vlog/VLogReasonerWriteInferencesTest.java | 6 +- 9 files changed, 163 insertions(+), 101 deletions(-) create mode 100644 rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 6eadfc90a..48c994911 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -45,6 +45,7 @@ import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; /** * Interface that exposes the 
(existential) rule reasoning capabilities of a @@ -94,6 +95,17 @@ static Reasoner getInstance(Function make return getInstance(makeReasoner, KnowledgeBase::new); } + /** + * Factory method to create a suitable {@link InMemoryDataSource} with given + * arity and initial capacity. + * + * @param arity the arity for the data source. + * @param initialCapacity the initial capacity of the data source. + * + * @return an instance of an implementation of InMemoryDataSource. + */ + InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity); + /** * Factory method that to instantiate a Reasoner with an empty knowledge base. * @@ -139,13 +151,11 @@ public interface InferenceAction { Correctness forEachInference(InferenceAction action) throws IOException; /** - * Performs the given action for each inference, swallowing - * checked exceptions. + * Performs the given action for each inference, swallowing checked exceptions. * * @param action The action to be performed for ecah inference. - * @return the correctness of the inferences, depending on the - * state of the reasoning (materialisation) and its {@link - * KnowledgeBase}. + * @return the correctness of the inferences, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. */ default Correctness unsafeForEachInference(BiConsumer> action) { try { @@ -174,8 +184,8 @@ default Correctness writeInferences(OutputStream stream) throws IOException { /** * Return a stream of all inferences. * - * @return a {@link Stream} of {@link Fact} objects corresponding - * to all inferences. + * @return a {@link Stream} of {@link Fact} objects corresponding to all + * inferences. */ default Stream getInferences() { Stream.Builder builder = Stream.builder(); @@ -185,12 +195,11 @@ default Stream getInferences() { } /** - * Return the {@link Correctness} status of query answers. 
- * - * @return the correctnes of query answers, depending on the state - * of the reasoning (materialisation) and aits {@link - * KnowledgeBase}. - */ + * Return the {@link Correctness} status of query answers. + * + * @return the correctness of query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + */ Correctness getCorrectness(); /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index 72af91a42..bdf244ac5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,8 +20,6 @@ * #L% */ -import java.util.Arrays; - import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -35,12 +33,10 @@ * @author Markus Kroetzsch * */ -public class InMemoryDataSource implements ReasonerDataSource { +public abstract class InMemoryDataSource implements ReasonerDataSource { - String[][] data; - int nextEmptyTuple = 0; - int capacity; - final int arity; + protected int capacity; + protected final int arity; /** * Create a new in-memory data source for facts of the specified arity.
The @@ -54,23 +50,6 @@ public class InMemoryDataSource implements ReasonerDataSource { public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; this.arity = arity; - this.data = new String[initialCapacity][arity]; - } - - /** - * Transforms a constant name in a format suitable for the - * reasoner. The default implementation assumes the VLog backend. - * @param constantName the name of the constant - * - * @return a transformed version of constantName that is suitable for the Reasoner. - */ - protected String transformConstantName(String constantName) { - if (!constantName.startsWith("\"") && constantName.contains(":")) { - // enclose IRIs with brackets - return "<" + constantName + ">"; - } - // it's either a datatype literal, or a relative IRI, leave it unchanged - return constantName; } /** @@ -79,51 +58,12 @@ protected String transformConstantName(String constantName) { * * @param constantNames the string names of the constants in this fact */ - public void addTuple(final String... constantNames) { + public abstract void addTuple(final String... constantNames); + + protected void validateArity(final String... constantNames) { if (constantNames.length != this.arity) { throw new IllegalArgumentException("This data source holds tuples of arity " + this.arity + ". Adding a tuple of size " + constantNames.length + " is not possible."); } - if (this.nextEmptyTuple == this.capacity) { - this.capacity = this.capacity * 2; - this.data = Arrays.copyOf(this.data, this.capacity); - } - this.data[this.nextEmptyTuple] = new String[this.arity]; - for (int i = 0; i < this.arity; i++) { - this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); - } - this.nextEmptyTuple++; - } - - /** - * Returns the data stored in this data source, in the format expected by the - * VLog reasoner backend. 
- * - * @return the data - */ - public String[][] getData() { - if (this.nextEmptyTuple == this.capacity) { - return this.data; - } else { - return Arrays.copyOf(this.data, this.nextEmptyTuple); - } - } - - @Override - public String getSyntacticRepresentation() { - final StringBuilder sb = new StringBuilder( - "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); - for (int i = 0; i < getData().length; i++) { - for (int j = 0; j < this.data[i].length; j++) { - sb.append(this.data[i][j] + " "); - } - sb.append("\n"); - } - return sb.toString(); - } - - @Override - public void accept(DataSourceConfigurationVisitor visitor) { - visitor.visit(this); } } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 591261fb1..838ea639e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -27,19 +27,20 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogInMemoryDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; /** * This example shows how to reason efficiently with data sets generated in * Java. We generate a random graph with several million edges, check * connectivity, and count triangles. - * + * * Parameters can be modified to obtain graphs of different sizes and density. * It should be noted, however, that the number of triangles in reasonably dense * graphs tends to be huge, and it is easy to exhaust memory in this way. - * + * * @author Markus Kroetzsch * */ @@ -53,7 +54,7 @@ public static void main(final String[] args) throws ParsingException, IOExceptio final int vertexCount = 10000; final double density = 0.03; // initialise data source for storing edges (estimate how many we'll need) - final InMemoryDataSource edges = new InMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); + final InMemoryDataSource edges = new VLogInMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); int edgeCount = 0; for (int i = 1; i <= vertexCount; i++) { for (int j = 1; j <= vertexCount; j++) { @@ -64,7 +65,7 @@ public static void main(final String[] args) throws ParsingException, IOExceptio } } // also make a unary data source to mark vertices: - final InMemoryDataSource vertices = new InMemoryDataSource(1, vertexCount); + final InMemoryDataSource vertices = new VLogInMemoryDataSource(1, vertexCount); for (int i = 1; i <= vertexCount; i++) { vertices.addTuple("v" + i); } diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 
7aa17b881..6fd6c8d5b 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -14,7 +14,7 @@ jar Rulewerk VLog Reasoner Support - and model + Bindings for the VLog reasoner backend. 1.3.3-snapshot diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java new file mode 100644 index 000000000..9e5fabd64 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -0,0 +1,107 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Arrays; + +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; + +/** + * Implementation of {@link InMemoryDataSource} for the VLog backend. + */ +public class VLogInMemoryDataSource extends InMemoryDataSource { + String[][] data; + int nextEmptyTuple = 0; + + public VLogInMemoryDataSource(final int arity, final int initialCapacity) { + super(arity, initialCapacity); + this.data = new String[initialCapacity][arity]; + } + + /** + * Transforms a constant name in a format suitable for the + * reasoner. 
The default implementation assumes the VLog backend. + * @param constantName the name of the constant + * + * @return a transformed version of constantName that is suitable for the Reasoner. + */ + protected String transformConstantName(String constantName) { + if (!constantName.startsWith("\"") && constantName.contains(":")) { + // enclose IRIs with brackets + return "<" + constantName + ">"; + } + // it's either a datatype literal, or a relative IRI, leave it unchanged + return constantName; + } + + /** + * Adds a fact to this data source. The number of constant names must agree with + * the arity of this data source. + * + * @param constantNames the string names of the constants in this fact + */ + public void addTuple(final String... constantNames) { + validateArity(constantNames); + + if (this.nextEmptyTuple == this.capacity) { + this.capacity = this.capacity * 2; + this.data = Arrays.copyOf(this.data, this.capacity); + } + this.data[this.nextEmptyTuple] = new String[this.arity]; + for (int i = 0; i < this.arity; i++) { + this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); + } + this.nextEmptyTuple++; + } + + /** + * Returns the data stored in this data source, in the format expected by the + * VLog reasoner backend. 
+ * + * @return the data + */ + public String[][] getData() { + if (this.nextEmptyTuple == this.capacity) { + return this.data; + } else { + return Arrays.copyOf(this.data, this.nextEmptyTuple); + } + } + + @Override + public String getSyntacticRepresentation() { + final StringBuilder sb = new StringBuilder( + "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); + for (int i = 0; i < getData().length; i++) { + for (int j = 0; j < this.data[i].length; j++) { + sb.append(this.data[i][j] + " "); + } + sb.append("\n"); + } + return sb.toString(); + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index a2a7f1050..a763af891 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -111,6 +111,11 @@ public KnowledgeBase getKnowledgeBase() { return this.knowledgeBase; } + @Override + public InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity) { + return new VLogInMemoryDataSource(arity, initialCapacity); + } + @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); @@ -222,9 +227,9 @@ void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { } void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { - if (dataSource instanceof InMemoryDataSource) { + if (dataSource instanceof VLogInMemoryDataSource) { - final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; + final VLogInMemoryDataSource inMemoryDataSource = (VLogInMemoryDataSource) dataSource; try { load(predicate, inMemoryDataSource); } catch (final 
EDBConfigurationException e) { @@ -233,7 +238,7 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica } } - void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java index d9cc90601..583b34229 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -71,7 +71,7 @@ public class QueryAnsweringCorrectnessTest { private static final Fact factQg = Expressions.makeFact(predQ, g); private static final Fact factQh = Expressions.makeFact(predQ, h); - private static final InMemoryDataSource datasource = new InMemoryDataSource(1, 2); + private static final InMemoryDataSource datasource = new VLogInMemoryDataSource(1, 2); { datasource.addTuple("e"); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java index b4b860f58..df53efe03 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -93,7 +93,7 @@ public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLExce @Test public void visit_InMemoryDataSource_returnsNull() { - final InMemoryDataSource inMemoryDataSource = new InMemoryDataSource(1, 1); + final InMemoryDataSource inMemoryDataSource = new VLogInMemoryDataSource(1, 1); assertEquals(null, toConfigString(inMemoryDataSource)); } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 5eb58262e..81835692d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -80,7 +80,7 @@ public class VLogReasonerWriteInferencesTest { private final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); private final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), Expressions.makeAbstractConstant("germany")); - private final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + private final InMemoryDataSource locations = new VLogInMemoryDataSource(2, 1); private KnowledgeBase kb; @Before From 3a83965722ea1c0eb78eba46bb0c9187b8b2fa24 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 19:52:57 +0200 Subject: [PATCH 0860/1255] VLog: Add regression tests --- .../rulewerk/reasoner/vlog/VLogReasoner.java | 4 +- .../reasoner/vlog/VLogRegressionTest.java | 78 +++++++++++++++++++ 2 files changed, 79 insertions(+), 3 deletions(-) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index a763af891..a40e8cd0d 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -21,7 +21,6 @@ */ import java.io.IOException; -import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -43,7 +42,6 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; import org.semanticweb.rulewerk.core.reasoner.AcyclicityNotion; import org.semanticweb.rulewerk.core.reasoner.Algorithm; import org.semanticweb.rulewerk.core.reasoner.Correctness; @@ -56,8 
+54,8 @@ import org.semanticweb.rulewerk.core.reasoner.ReasonerState; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; import org.semanticweb.rulewerk.core.reasoner.implementation.EmptyQueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java new file mode 100644 index 000000000..0bd2434c1 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java @@ -0,0 +1,78 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; + +public class VLogRegressionTest { + @Test + public void test_issue_166() throws IOException { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + + final Predicate A = Expressions.makePredicate("A", 1); + final Predicate B = Expressions.makePredicate("B", 1); + final Predicate C = Expressions.makePredicate("C", 1); + final Predicate R = Expressions.makePredicate("Rel", 1); + + final AbstractConstant star = Expressions.makeAbstractConstant("star"); + final AbstractConstant cy = Expressions.makeAbstractConstant("cy"); + final AbstractConstant r0 = Expressions.makeAbstractConstant("r0"); + final UniversalVariable x0 = Expressions.makeUniversalVariable("x0"); + final UniversalVariable x2 = Expressions.makeUniversalVariable("x2"); + + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(B, x2), + Expressions.makePositiveLiteral(A, x2))); + knowledgeBase.addStatement(Expressions.makeFact(B, star)); + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(R, r0), + Expressions.makePositiveLiteral(C, cy), + Expressions.makePositiveLiteral(B, x0))); + knowledgeBase.addStatement(Expressions.makeFact(C, cy)); + + try (final Reasoner reasoner = new VLogReasoner(knowledgeBase)) { + 
reasoner.reason(); + final QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral(R, x0), false); + assertTrue(result.hasNext()); + final QueryResult terms = result.next(); + assertFalse(result.hasNext()); + final List expectedTerms = new ArrayList(); + expectedTerms.add(r0); + assertEquals(expectedTerms, terms.getTerms()); + } + } +} From 1d111d474a4409efd6fc588c6d999893a6ee6889 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 20:04:43 +0200 Subject: [PATCH 0861/1255] Update documentation --- README.md | 5 +++-- RELEASE-NOTES.md | 5 ++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index f950b8107..11ee4e1fc 100644 --- a/README.md +++ b/README.md @@ -27,10 +27,11 @@ You need to use Java 1.8 or above. Available modules include: * **rulewerk-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **rulewerk-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API * **rulewerk-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/rulewerk/wiki/Standalone-client) for Rulewerk. +* **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use rulewerk-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). 
In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: -* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of rulewerk-base. +* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog-base. * Run ```mvn install``` to test if the setup works diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index d0408e336..da1594e58 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -15,11 +15,14 @@ Breaking changes: changing several import paths. `Reasoner.getInstance()` now takes a mandatory argument, a function taking a `KnowledgeBase` and returning a `Reasoner` instance. Previous behaviour can be obtained - by using `Reasoner.getInstance(VLogReasoner::new)`. + by using `Reasoner.getInstance(VLogReasoner::new)`. As a result, + InMemoryDataSource has become an abstract class, use + VLogInMemoryDataSource where applicable. 
New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` * All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` * Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` * Rules files may import other rules files using `@import` and `@import-relative`, where the latter resolves relative IRIs using From 4001a34945a592820546178919285fecdffa49e2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 15 May 2020 21:39:40 +0200 Subject: [PATCH 0862/1255] VLog: Add another test case --- .../reasoner/vlog/VLogRegressionTest.java | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java index 0bd2434c1..4aa92c75b 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java @@ -75,4 +75,35 @@ public void test_issue_166() throws IOException { assertEquals(expectedTerms, terms.getTerms()); } } + + @Test + public void test_vlog_issue_44() throws IOException { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + + final Predicate P = Expressions.makePredicate("P", 1); + final Predicate Q = Expressions.makePredicate("Q", 1); + final Predicate R = Expressions.makePredicate("R", 1); + + final AbstractConstant c = Expressions.makeAbstractConstant("c"); + final AbstractConstant d = Expressions.makeAbstractConstant("d"); + final UniversalVariable x = Expressions.makeUniversalVariable("x"); + + knowledgeBase.addStatement(Expressions.makeFact(P, c)); + knowledgeBase.addStatement(Expressions.makeFact(Q, d)); + 
knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(R, x), + Expressions.makePositiveLiteral(P, x), + Expressions.makeNegativeLiteral(Q, x))); + + try (final Reasoner reasoner = new VLogReasoner(knowledgeBase)) { + reasoner.reason(); + final QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral(R, x), false); + assertTrue(result.hasNext()); + final QueryResult terms = result.next(); + assertFalse(result.hasNext()); + final List expectedTerms = new ArrayList(); + expectedTerms.add(c); + assertEquals(expectedTerms, terms.getTerms()); + assertFalse(result.hasNext()); + } + } } From 8ec7ab9403320bd7dba1dc4f796cb441d84fd407 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Sun, 17 May 2020 15:07:32 +0200 Subject: [PATCH 0863/1255] VLog: Fix formatting --- .../rulewerk/reasoner/vlog/VLogRegressionTest.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java index 4aa92c75b..d79fceab3 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java @@ -1,8 +1,5 @@ package org.semanticweb.rulewerk.reasoner.vlog; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - /* * #%L * Rulewerk VLog Reasoner Support @@ -24,6 +21,8 @@ */ import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import java.io.IOException; import java.util.ArrayList; From 0f2fccbbe756952acdf6873fc5f585789b04d31b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 18 May 2020 21:25:11 +0200 Subject: [PATCH 0864/1255] Address review comments --- RELEASE-NOTES.md | 9 ++-- 
.../rulewerk/core/reasoner/Reasoner.java | 49 ++----------------- .../implementation/CsvFileDataSource.java | 6 +-- .../DataSourceConfigurationVisitor.java | 38 ++++++++++++-- .../implementation/FileDataSource.java | 31 ++---------- .../implementation/RdfFileDataSource.java | 6 +-- .../implementation/ReasonerDataSource.java | 8 +-- .../SparqlQueryResultDataSource.java | 19 +------ .../core/AddDataFromSparqlQueryResults.java | 6 +-- .../core/ConfigureReasonerLogging.java | 6 +-- .../examples/graal/AddDataFromDlgpFile.java | 6 +-- .../VLogDataSourceConfigurationVisitor.java | 27 +++++++--- .../reasoner/vlog/VLogKnowledgeBase.java | 12 +++-- .../rulewerk/reasoner/vlog/VLogReasoner.java | 5 -- .../reasoner/vlog/CsvFileDataSourceTest.java | 25 ++-------- .../vlog/FileDataSourceTestUtils.java | 5 +- .../rulewerk/reasoner/vlog/LoggingTest.java | 6 +-- .../reasoner/vlog/RdfFileDataSourceTest.java | 10 ++-- .../reasoner/vlog/ReasonerTimeoutTest.java | 6 +-- ...LogDataSourceConfigurationVisitorTest.java | 27 +++++++--- .../reasoner/vlog/VLogReasonerBasics.java | 8 +-- .../reasoner/vlog/VLogReasonerStateTest.java | 16 +++--- 22 files changed, 145 insertions(+), 186 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index da1594e58..9f52a1d57 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -12,12 +12,9 @@ Breaking changes: * The `FileDataSource` constructor and those of derived classes now take the path to a file instead of `File` object. * The VLog backend has been moved to a new `rulewerk-vlog` module, - changing several import paths. `Reasoner.getInstance()` now takes a - mandatory argument, a function taking a `KnowledgeBase` and - returning a `Reasoner` instance. Previous behaviour can be obtained - by using `Reasoner.getInstance(VLogReasoner::new)`. As a result, - InMemoryDataSource has become an abstract class, use - VLogInMemoryDataSource where applicable. + changing several import paths. `Reasoner.getInstance()` is + gone. 
Furthermore, InMemoryDataSource has become an abstract class, + use VLogInMemoryDataSource where applicable. New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 48c994911..dcc3cea7b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -82,46 +82,6 @@ */ public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { - - /** - * Factory method to instantiate a Reasoner with an empty knowledge base. - * - * @param makeReasoner a function that creates a Reasoner instances given a - * {@link KnowledgeBase}. - * - * @return a {@link Reasoner} instance. - */ - static Reasoner getInstance(Function makeReasoner) { - return getInstance(makeReasoner, KnowledgeBase::new); - } - - /** - * Factory method to create a suitable {@link InMemoryDataSource} with given - * arity and initial capacity. - * - * @param arity the arity for the data source. - * @param initialCapacity the initial capacity of the data source. - * - * @return an instance of an implementation of InMemoryDataSource. - */ - InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity); - - /** - * Factory method that to instantiate a Reasoner with an empty knowledge base. - * - * @param makeReasoner a function that creates a Reasoner instances given a - * {@link KnowledgeBase}. - * @param makeKnowledgeBase a function that creates a {@link KnowledgeBase} - * instance. - * - * @return a {@link Reasoner} instance. 
- */ - static Reasoner getInstance(Function makeReasoner, - Supplier makeKnowledgeBase) { - final KnowledgeBase knowledgeBase = makeKnowledgeBase.get(); - return makeReasoner.apply(knowledgeBase); - } - /** * Getter for the knowledge base to reason on. * @@ -153,7 +113,7 @@ public interface InferenceAction { /** * Performs the given action for each inference, swallowing checked exceptions. * - * @param action The action to be performed for ecah inference. + * @param action The action to be performed for each inference. * @return the correctness of the inferences, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. */ @@ -198,14 +158,13 @@ default Stream getInferences() { * Return the {@link Correctness} status of query answers. * * @return the correctnes of query answers, depending on the state of the - * reasoning (materialisation) and aits {@link KnowledgeBase}. + * reasoning (materialisation) and its {@link KnowledgeBase}. */ Correctness getCorrectness(); /** - * Exports all the (explicit and - * {@link org.omg.PortableServer.IMPLICIT_ACTIVATION_POLICY_ID}) facts inferred - * during reasoning of the knowledge base to a desired file. + * Exports all the (explicit and implicit) facts inferred during + * reasoning of the knowledge base to a desired file. * * @param filePath a String of the file path for the facts to be written to. 
* @return the correctness of the query answers, depending on the state of the diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 3ee0a4574..7f72f25de 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -78,7 +78,7 @@ public String getSyntacticRepresentation() { } @Override - public void accept(DataSourceConfigurationVisitor visitor) { + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java index 82b3d11de..9ead436c5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,12 +20,42 @@ * #L% */ +import java.io.IOException; + +/** + * A visitor to generate (reasoner-specific) configuration for the various data + * sources. + * + * @author Maximilian Marx + */ public interface DataSourceConfigurationVisitor { - public void visit(CsvFileDataSource dataSource); + /** + * Configure the reasoner for a {@link CsvFileDataSource}. + * + * @param dataSource the data source to configure. + * @throws IOexception when an IO error occurs during configuration. + */ + public void visit(CsvFileDataSource dataSource) throws IOException; - public void visit(RdfFileDataSource dataSource); + /** + * Configure the reasoner for a {@link RdfFileDataSource}. + * + * @param dataSource the data source to configure. + * @throws IOexception when an IO error occurs during configuration. + */ + public void visit(RdfFileDataSource dataSource) throws IOException; + /** + * Configure the reasoner for a {@link SparqlQueryResultDataSource}. + * + * @param dataSource the data source to configure. + */ public void visit(SparqlQueryResultDataSource dataSource); + /** + * Configure the reasoner for a {@link InMemoryDataSource}. + * + * @param dataSource the data source to configure. 
+ */ public void visit(InMemoryDataSource dataSource); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index cbdb10e61..46ec295de 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,7 +22,6 @@ import java.io.File; import java.io.IOException; -import java.nio.file.Paths; import java.util.Optional; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -43,11 +42,6 @@ public abstract class FileDataSource implements ReasonerDataSource { private final String filePath; private final String fileName; private final String extension; - /** - * The canonical path to the parent directory where the file resides. - */ - private final String dirCanonicalPath; - private final String fileNameWithoutExtension; /** * Constructor. 
@@ -68,8 +62,7 @@ public FileDataSource(final String filePath, final Iterable possibleExte this.filePath = filePath; // unmodified file path, necessary for correct serialisation this.fileName = this.file.getName(); this.extension = getValidExtension(this.fileName, possibleExtensions); - this.fileNameWithoutExtension = this.fileName.substring(0, this.fileName.lastIndexOf(this.extension)); - this.dirCanonicalPath = Paths.get(file.getCanonicalPath()).getParent().toString(); + file.getCanonicalPath(); // make sure that the path is valid. } private String getValidExtension(final String fileName, final Iterable possibleExtensions) { @@ -96,22 +89,8 @@ public String getName() { return this.fileName; } - /** - * Canonicalise the file path - * - * @return The canonical path to the parent directory where the file resides. - */ - public String getDirCanonicalPath() { - return this.dirCanonicalPath; - } - - /** - * Get the base name of the file, without an extension. - * - * @return the file basename without any extension. - */ - public String getFileNameWithoutExtension() { - return this.fileNameWithoutExtension; + public String getExtension() { + return this.extension; } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 29a3f327f..9df6c5d26 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -82,7 +82,7 @@ public Optional getRequiredArity() { } @Override - public void accept(DataSourceConfigurationVisitor visitor) { + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java index 57c6e1dee..96020fceb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,6 +20,8 @@ * #L% */ +import java.io.IOException; + import org.semanticweb.rulewerk.core.model.api.DataSource; /** @@ -32,5 +34,5 @@ public interface ReasonerDataSource extends DataSource { * * @param visitor the visitor. 
*/ - public void accept(DataSourceConfigurationVisitor visitor); + public void accept(DataSourceConfigurationVisitor visitor) throws IOException; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index cdc8723d2..99f8548e0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -108,21 +108,6 @@ public String getQueryVariables() { return this.queryVariables; } - // @Override - // public final String toConfigString() { - // final String configStringPattern = - - // PREDICATE_NAME_CONFIG_LINE + - - // DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - - // "EDB%1$d_param0=" + this.endpoint + "\n" + "EDB%1$d_param1=" + this.queryVariables + "\n" + - - // "EDB%1$d_param2=" + this.queryBody + "\n"; - - // return configStringPattern; - // } - static String getQueryVariablesList(LinkedHashSet queryVariables) { final StringBuilder sb = new StringBuilder(); final Iterator iterator = queryVariables.iterator(); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index 2f74a96fa..b32c784ea 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -121,7 +121,7 @@ public static void main(final String[] args) throws IOException { */ final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); - try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index 902fbf543..6eeb04d60 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -66,7 +66,7 @@ public class ConfigureReasonerLogging { public static void main(final String[] args) throws IOException, ParsingException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* exists z. B(?y, !z) :- A(?x, ?y) . 
*/ kb.addStatements(RuleParser.parseRule("B(?Y, !Z) :- A(?X, ?Y) .")); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 1f9621bd8..14e0116ba 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException { * the reasoner automatically. */ - try (Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { final KnowledgeBase kb = reasoner.getKnowledgeBase(); /* diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java index dd4ac05f9..f5396dc24 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,8 +20,12 @@ * #L% */ +import java.io.IOException; +import java.nio.file.Paths; + import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; + import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -39,21 +43,30 @@ public String getConfigString() { return configString; } - protected void setFileConfigString(FileDataSource dataSource) { + protected void setFileConfigString(FileDataSource dataSource) throws IOException { this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + dataSource.getDirCanonicalPath() + "\n" + - "EDB%1$d_param1=" + dataSource.getFileNameWithoutExtension() + "\n"; + "EDB%1$d_param0=" + getDirCanonicalPath(dataSource) + "\n" + + "EDB%1$d_param1=" + getFileNameWithoutExtension(dataSource) + "\n"; + } + + String getDirCanonicalPath(FileDataSource dataSource) throws IOException { + return Paths.get(dataSource.getFile().getCanonicalPath()).getParent().toString(); + } + + String getFileNameWithoutExtension(FileDataSource dataSource) { + final String fileName = dataSource.getName(); + return fileName.substring(0, fileName.lastIndexOf(dataSource.getExtension())); } @Override - public void visit(CsvFileDataSource dataSource) { + public void visit(CsvFileDataSource dataSource) throws IOException { setFileConfigString(dataSource); } @Override 
- public void visit(RdfFileDataSource dataSource) { + public void visit(RdfFileDataSource dataSource) throws IOException { setFileConfigString(dataSource); } diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java index 3bd57a52f..6af5a9c39 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,6 +20,7 @@ * #L% */ +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Formatter; @@ -30,6 +31,7 @@ import java.util.Map.Entry; import java.util.Set; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -124,7 +126,11 @@ int addDataSourceConfigurationString(final DataSource dataSource, final Predicat if (dataSource instanceof ReasonerDataSource) { final ReasonerDataSource reasonerDataSource = (ReasonerDataSource) dataSource; final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); - reasonerDataSource.accept(visitor); + try { + reasonerDataSource.accept(visitor); + } catch (IOException e) { + throw new RulewerkRuntimeException("Error while building VLog data source configuration", e); + } final String configString = visitor.getConfigString(); if (configString != null) { formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index a40e8cd0d..695be1fca 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -109,11 +109,6 @@ public KnowledgeBase getKnowledgeBase() { return this.knowledgeBase; } - @Override - public InMemoryDataSource makeInMemoryDataSource(final int arity, final int initialCapacity) { - return new VLogInMemoryDataSource(arity, initialCapacity); - } - @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); diff --git 
a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java index d85f73d09..593993f73 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -52,24 +52,7 @@ public void testConstructor() throws IOException { final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); - FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, new File(csvFile).getName(), dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, new File(csvFile).getName()); + FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName()); } - - @Test - public void testNoParentDir() throws IOException { - final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); - final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); - final String currentFolder = new File(".").getCanonicalPath(); - assertEquals(currentFolder, dirCanonicalPath); - } - - @Test - public void testNotNormalisedParentDir() throws IOException { - final FileDataSource 
fileDataSource = new CsvFileDataSource("./././file.csv"); - final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); - final String currentFolder = new File(".").getCanonicalPath(); - assertEquals(currentFolder, dirCanonicalPath); - } - } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java index 6a0819ed3..73128188c 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java @@ -91,11 +91,8 @@ public static List> getCSVContent(final String csvFile) throws IOEx return content; } - public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName, - final String expectedDirCanonicalPath, final String expectedFileNameWithoutExtension) throws IOException { + public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName) throws IOException { assertEquals(expectedFileName, fileDataSource.getName()); - assertEquals(expectedDirCanonicalPath, fileDataSource.getDirCanonicalPath()); - assertEquals(expectedFileNameWithoutExtension, fileDataSource.getFileNameWithoutExtension()); } public static void testLoadEmptyFile(final Predicate predicate, final PositiveLiteral queryAtom, diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java index 598ea90c9..57691fc99 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in 
compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -109,7 +109,7 @@ public void testSetLogFileInexistent() throws IOException { @Test(expected = NullPointerException.class) public void testSetLogLevelNull() { - try (final Reasoner instance = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner instance = new VLogReasoner(new KnowledgeBase())) { instance.setLogLevel(null); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java index f16ab06f2..0ab0a7989 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -47,12 +47,10 @@ public void testConstructorFalseExtension() throws IOException { @Test public void testConstructor() throws IOException { - - final String dirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); - FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName(), dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName()); + FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName()); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java index 415e03ccf..5ec62cfbb 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -119,7 +119,7 @@ public void setUp() { @Test(expected = IllegalArgumentException.class) public void testSetReasoningTimeout() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.setReasoningTimeout(-3); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java index df53efe03..c11752655 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java @@ -30,14 +30,13 @@ import java.util.LinkedHashSet; import org.junit.Test; - import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class VLogDataSourceConfigurationVisitorTest { @@ -77,7 +76,7 @@ public void visit_RdfFileDataSource_succeeds() throws IOException { } @Test - public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLException { + public void visit_SparqlQueryResultDataSource_succeeds() throws IOException, 
MalformedURLException { final SparqlQueryResultDataSource simpleDataSource = new SparqlQueryResultDataSource(endpoint, "b,a", "?a wdt:P22 ?b"); final LinkedHashSet queryVariables = new LinkedHashSet<>( @@ -92,12 +91,28 @@ public void visit_SparqlQueryResultDataSource_succeeds() throws MalformedURLExce } @Test - public void visit_InMemoryDataSource_returnsNull() { + public void visit_InMemoryDataSource_returnsNull() throws IOException { final InMemoryDataSource inMemoryDataSource = new VLogInMemoryDataSource(1, 1); assertEquals(null, toConfigString(inMemoryDataSource)); } - private String toConfigString(ReasonerDataSource dataSource) { + @Test + public void getDirCanonicalPath_relativePath_succeeds() throws IOException { + final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); + final String currentFolder = new File(".").getCanonicalPath(); + assertEquals(currentFolder, visitor.getDirCanonicalPath(fileDataSource)); + } + + @Test + public void getDirCanonicalPath_nonNormalisedPath_succeeds() throws IOException { + final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + final FileDataSource fileDataSource = new CsvFileDataSource("./././file.csv"); + final String currentFolder = new File(".").getCanonicalPath(); + assertEquals(currentFolder, visitor.getDirCanonicalPath(fileDataSource)); + } + + private String toConfigString(ReasonerDataSource dataSource) throws IOException { VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); dataSource.accept(visitor); return visitor.getConfigString(); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java index ca6a3bed4..8e2b07f57 100644 --- 
a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java @@ -12,9 +12,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -59,14 +59,14 @@ public class VLogReasonerBasics { @Test(expected = NullPointerException.class) public void testSetAlgorithmNull() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.setAlgorithm(null); } } @Test(expected = NullPointerException.class) public void setRuleRewriteStrategy1() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new);) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.setRuleRewriteStrategy(null); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java index 17f3dc8cc..0ea81efbb 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -68,21 +68,21 @@ public class VLogReasonerStateTest { @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.answerQuery(exampleQueryAtom, true); } } @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); } } @Test(expected = ReasonerStateException.class) public void testFailAnswerQueryAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.reason(); reasoner.resetReasoner(); reasoner.answerQuery(exampleQueryAtom, true); @@ -91,7 +91,7 @@ public void testFailAnswerQueryAfterReset() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswersAfterReset() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.reason(); reasoner.resetReasoner(); reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); @@ -172,7 +172,7 @@ public void testAddFacts2() throws IOException { @Test public void testResetBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { 
+ try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.resetReasoner(); } } @@ -280,7 +280,7 @@ public void testResetEmptyKnowledgeBase() throws IOException { @Test(expected = ReasonerStateException.class) public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { - try (final Reasoner reasoner = Reasoner.getInstance(VLogReasoner::new)) { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", true); } From f9e5dad35cb9e0a400fa1a88ca2358c0915e6741 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 May 2020 10:04:31 +0200 Subject: [PATCH 0865/1255] Enable deprecation warnings --- pom.xml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pom.xml b/pom.xml index bbe69eb09..5cff39168 100644 --- a/pom.xml +++ b/pom.xml @@ -228,6 +228,9 @@ 1.8 1.8 + + -Xlint:deprecation + From 0d8a7ec62993da766bbc193b33a3a29e02d01ec3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 May 2020 10:04:48 +0200 Subject: [PATCH 0866/1255] VLog: Add more tests --- .../reasoner/vlog/VLogQueryResultUtils.java | 24 ++++++-- .../vlog/VLogReasonerWriteInferencesTest.java | 59 +++++++++++++++++++ 2 files changed, 79 insertions(+), 4 deletions(-) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java index d192add94..782123d7c 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,10 +22,14 @@ import static org.junit.Assert.assertTrue; +import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.function.BiConsumer; + +import org.semanticweb.rulewerk.core.model.api.Predicate; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; @@ -33,7 +37,7 @@ /** * Utility class with static methods used for collecting query results for * testing purposes. - * + * * @author Irina Dragoste * */ @@ -46,7 +50,7 @@ private VLogQueryResultUtils() { * Collects TermQueryResultIterator results into a Set. Transforms the array of * {@link Term}s into a set of {@link Term}s. Asserts that the results do not * contain duplicates. Closes the iterator after collecting the results. - * + * * @param queryResultIterator * @return a set of unique query result. A query result is a List of Term * tuples. @@ -61,4 +65,16 @@ static Set> collectResults(final TermQueryResultIterator queryResultI return answers; } + @SuppressWarnings("unchecked") + private static void sneakyThrow(Throwable e) throws E { + throw (E) e; + } + + /** + * Throw an {@link IOException}, uncheckedly. Needed for testing + * {@link VLogReasoner#unsafeForEachInference}. 
+ */ + static void sneakilyThrowIOException() { + sneakyThrow(new IOException()); + } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 81835692d..e7342762d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -25,16 +25,20 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.BiConsumer; import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.Before; import org.junit.Test; + import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Conjunction; import org.semanticweb.rulewerk.core.model.api.Constant; @@ -43,11 +47,14 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import 
org.semanticweb.rulewerk.core.reasoner.Reasoner.InferenceAction; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class VLogReasonerWriteInferencesTest { @@ -123,6 +130,58 @@ public void writeInferences_withBase_writesBase() throws IOException, PrefixDecl assertTrue("the base declaration is present", getInferences().contains("@base .")); } + @Test + public void getInferences_example_succeeds() throws IOException { + final List inferences = getInferences(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + final List fromStream = reasoner.getInferences() + .map(Fact::getSyntacticRepresentation) + .collect(Collectors.toList()); + assertEquals(inferences, fromStream); + } + } + + @Test + public void unsafeForEachInference_example_succeeds() throws IOException { + final List inferences = getInferences(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + final List fromUnsafe = new ArrayList<>(); + + reasoner.unsafeForEachInference((Predicate, terms) -> { + fromUnsafe.add(Expressions.makeFact(Predicate, terms).getSyntacticRepresentation()); + }); + + assertEquals(inferences, fromUnsafe); + } + } + + @Test(expected = IOException.class) + public void forEachInference_throwingAction_throws() throws IOException { + InferenceAction action = mock(InferenceAction.class); + doThrow(IOException.class).when(action).accept(any(Predicate.class), anyList()); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + reasoner.forEachInference(action); + } + } + + private class ThrowingConsumer implements BiConsumer> { + @Override + public void accept(Predicate predicate, List terms) { + VLogQueryResultUtils.sneakilyThrowIOException(); + } + } + + @Test(expected = RulewerkRuntimeException.class) + public void unsafeForEachInference_throwingAction_throws() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + 
reasoner.unsafeForEachInference(new ThrowingConsumer()); + } + } + private List getInferences() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); From 5a8e1eefa89f34caa53789b9ee5ee795a444cff0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 19 May 2020 18:24:49 +0200 Subject: [PATCH 0867/1255] Address Review Comments --- .../rulewerk/core/reasoner/Reasoner.java | 3 -- .../CsvFileDataSourceTest.java | 8 +-- .../FileDataSourceTestUtils.java | 52 +++++++++++++++++++ .../RdfFileDataSourceTest.java | 7 +-- .../SparqlQueryResultDataSourceTest.java | 10 ++-- .../rulewerk/parser/DirectiveHandler.java | 6 +-- .../ImportFileRelativeDirectiveHandler.java | 5 +- .../parser/RuleParserParseFactTest.java | 11 ++-- .../rulewerk/reasoner/vlog/VLogReasoner.java | 4 +- .../reasoner/vlog/VLogToModelConverter.java | 23 ++++---- .../reasoner/vlog/VLogQueryResultUtils.java | 3 -- .../vlog/VLogReasonerWriteInferencesTest.java | 6 +-- 12 files changed, 79 insertions(+), 59 deletions(-) rename {rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation}/CsvFileDataSourceTest.java (82%) create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java rename {rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation}/RdfFileDataSourceTest.java (87%) rename {rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation}/SparqlQueryResultDataSourceTest.java (86%) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index dcc3cea7b..276398c62 100644 --- 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -26,8 +26,6 @@ import java.io.OutputStream; import java.util.List; import java.util.function.BiConsumer; -import java.util.function.Function; -import java.util.function.Supplier; import java.util.stream.Stream; import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; @@ -45,7 +43,6 @@ import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.model.implementation.Serializer; -import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; /** * Interface that exposes the (existential) rule reasoning capabilities of a diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java similarity index 82% rename from rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index 593993f73..119e8057d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/CsvFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,16 +20,11 @@ * #L% */ -import static org.junit.Assert.assertEquals; - import java.io.File; import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; - 
public class CsvFileDataSourceTest { private final String ntFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; @@ -48,7 +43,6 @@ public void testConstructorFalseExtension() throws IOException { @Test public void testConstructor() throws IOException { - final String dirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java new file mode 100644 index 000000000..057ceba16 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +/** + * Utility class for reading from and writing to data source files. 
+ * + * @author Christian Lewe + * @author Irina Dragoste + * + */ +public final class FileDataSourceTestUtils { + + public static final String INPUT_FOLDER = "src/test/data/input/"; + + /* + * This is a utility class. Therefore, it is best practice to do the following: + * (1) Make the class final, (2) make its constructor private, (3) make all its + * fields and methods static. This prevents the classes instantiation and + * inheritance. + */ + private FileDataSourceTestUtils() { + + } + + public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName) + throws IOException { + assertEquals(expectedFileName, fileDataSource.getName()); + } +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java similarity index 87% rename from rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index 0ab0a7989..3bb39bc8d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -1,6 +1,4 @@ -package org.semanticweb.rulewerk.reasoner.vlog; - -import static org.junit.Assert.assertEquals; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -27,9 +25,6 @@ import org.junit.Test; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; - public class RdfFileDataSourceTest { private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; diff --git 
a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java similarity index 86% rename from rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index 81f6aaa92..6e99a539f 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/SparqlQueryResultDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,8 +20,6 @@ * #L% */ -import static org.junit.Assert.assertEquals; - import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -32,8 +30,6 @@ import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class SparqlQueryResultDataSourceTest { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index ae6c25251..76c75e716 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,13 +21,11 @@ */ import java.io.File; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.nio.file.InvalidPathException; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 1ef7a4372..7de06a3ea 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -26,7 +26,6 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; -import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.DirectiveArgument; diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java index dee7199c1..d39446d31 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,20 +20,15 @@ * #L% */ -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; public class RuleParserParseFactTest implements ParserTestUtils { diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 695be1fca..ea35c37f8 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -54,7 +54,6 @@ import org.semanticweb.rulewerk.core.reasoner.ReasonerState; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; import org.semanticweb.rulewerk.core.reasoner.implementation.EmptyQueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -231,7 +230,8 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica } } - void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + void load(final Predicate predicate, final 
VLogInMemoryDataSource inMemoryDataSource) + throws EDBConfigurationException { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index 581e13368..c68d68234 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -31,7 +31,6 @@ import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.Serializer; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** @@ -84,15 +83,15 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { static Term toTerm(karmaresearch.vlog.Term vLogTerm) { final String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { - case CONSTANT: - return toConstant(name); - case BLANK: - return new NamedNullImpl(name); - case VARIABLE: - throw new IllegalArgumentException( - "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); - default: - throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); + case CONSTANT: + return toConstant(name); + case BLANK: + return new NamedNullImpl(name); + case VARIABLE: + throw new IllegalArgumentException( + "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); + default: + throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java index 782123d7c..8f3d65308 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java @@ -27,9 +27,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.function.BiConsumer; - -import org.semanticweb.rulewerk.core.model.api.Predicate; import 
karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index e7342762d..19365056e 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -51,7 +51,6 @@ import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.core.reasoner.Reasoner.InferenceAction; @@ -135,9 +134,8 @@ public void getInferences_example_succeeds() throws IOException { final List inferences = getInferences(); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final List fromStream = reasoner.getInferences() - .map(Fact::getSyntacticRepresentation) - .collect(Collectors.toList()); + final List fromStream = reasoner.getInferences().map(Fact::getSyntacticRepresentation) + .collect(Collectors.toList()); assertEquals(inferences, fromStream); } } From 38a49793c88d1deda382b9d61b00d767bc175040 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 19 May 2020 19:52:17 +0200 Subject: [PATCH 0868/1255] rename vlog jar from vlog-base to vlog-java --- rulewerk-vlog/pom.xml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 6fd6c8d5b..d8438ed7d 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -18,6 +18,7 @@ 
1.3.3-snapshot + vlog-java @@ -29,7 +30,7 @@ ${project.groupId} - vlog-base + ${karmaresearch.vlog.artifactId} ${karmaresearch.vlog.version} @@ -55,7 +56,7 @@ ${project.groupId} - vlog-base + ${karmaresearch.vlog.artifactId} ${karmaresearch.vlog.version} jar ./lib/jvlog-local.jar From 82551d63ad56b9e4a45c69236c1ac7d039aa82c3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 20 May 2020 11:44:03 +0200 Subject: [PATCH 0869/1255] VLog: Fix detection of data statements during loading Fixes #173. --- .../reasoner/vlog/VLogKnowledgeBase.java | 2 +- .../reasoner/vlog/VLogKnowledgeBaseTest.java | 75 +++++++++++++++++++ 2 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java index 6af5a9c39..9f41cc7c9 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java @@ -84,7 +84,7 @@ public class VLogKnowledgeBase { } boolean hasData() { - return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); + return !this.edbPredicates.isEmpty() || !this.aliasedEdbPredicates.isEmpty(); } public boolean hasRules() { diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java new file mode 100644 index 000000000..81ccbc147 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java @@ -0,0 +1,75 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk 
Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; + +public class VLogKnowledgeBaseTest { + private KnowledgeBase knowledgeBase = new KnowledgeBase(); + private Predicate p = Expressions.makePredicate("P", 1); + private Predicate q = Expressions.makePredicate("Q", 1); + private UniversalVariable x = Expressions.makeUniversalVariable("x"); + private AbstractConstant c = Expressions.makeAbstractConstant("c"); + private Fact fact = Expressions.makeFact(p, c); + private PositiveLiteral literal = Expressions.makePositiveLiteral(p, x); + private Rule rule = Expressions.makeRule(literal, literal); + + @Test + public void hasData_noData_returnsFalse() { + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertFalse(vKB.hasData()); + } + + @Test + public void hasData_noAliasedPredicates_returnsTrue() { + 
knowledgeBase.addStatement(fact); + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertTrue(vKB.hasData()); + } + + @Test + public void hasData_onlyAliasedPredicates_returnsTrue() { + knowledgeBase.addStatement(rule); + knowledgeBase.addStatement(fact); + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertTrue(vKB.hasData()); + } + + @Test + public void hasData_bothUnaliasedAndAliasedPredicates_returnsTrue() { + knowledgeBase.addStatement(Expressions.makeFact(q, c)); + knowledgeBase.addStatement(rule); + knowledgeBase.addStatement(fact); + VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase); + assertTrue(vKB.hasData()); + } +} From a71d91464160dedd7cd32b9a3cc5a06dabcc8ae7 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 20 May 2020 11:51:36 +0200 Subject: [PATCH 0870/1255] added developers Larry and Ali --- pom.xml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 5cff39168..391c9de20 100644 --- a/pom.xml +++ b/pom.xml @@ -20,10 +20,10 @@ rulewerk-core rulewerk-vlog rulewerk-rdf - rulewerk-examples rulewerk-owlapi rulewerk-graal rulewerk-parser + rulewerk-examples rulewerk-client coverage @@ -63,6 +63,16 @@ Maximilian Marx maximilian.marx@tu-dresden.de + + larry + Larry González + larry.gonzalez@tu-dresden.de + + + ali + Ali Elhalawati + ali.elhalawati@tu-dresden.de + From 0e34e3a505175e7ebd915b035bd29ee2292ce0b7 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 20 May 2020 12:30:59 +0200 Subject: [PATCH 0871/1255] VLog: Avoid duplicated code --- .../reasoner/vlog/VLogInMemoryDataSource.java | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java index 9e5fabd64..c6952d09f 100644 --- 
a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -37,22 +37,6 @@ public VLogInMemoryDataSource(final int arity, final int initialCapacity) { this.data = new String[initialCapacity][arity]; } - /** - * Transforms a constant name in a format suitable for the - * reasoner. The default implementation assumes the VLog backend. - * @param constantName the name of the constant - * - * @return a transformed version of constantName that is suitable for the Reasoner. - */ - protected String transformConstantName(String constantName) { - if (!constantName.startsWith("\"") && constantName.contains(":")) { - // enclose IRIs with brackets - return "<" + constantName + ">"; - } - // it's either a datatype literal, or a relative IRI, leave it unchanged - return constantName; - } - /** * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. @@ -68,7 +52,7 @@ public void addTuple(final String... 
constantNames) { } this.data[this.nextEmptyTuple] = new String[this.arity]; for (int i = 0; i < this.arity; i++) { - this.data[this.nextEmptyTuple][i] = transformConstantName(constantNames[i]); + this.data[this.nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); } this.nextEmptyTuple++; } From 74ab5a9ee3fe7e2c01291a9ee3fa1462c558e4df Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 20 May 2020 12:41:07 +0200 Subject: [PATCH 0872/1255] eliminate duplicated code for getting VLog string representation of IRI --- .../reasoner/vlog/TermToVLogConverter.java | 46 ++++++++++--------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 90496ff76..13a078076 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -1,5 +1,11 @@ package org.semanticweb.rulewerk.reasoner.vlog; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; + /*- * #%L * Rulewerk VLog Reasoner Support @@ -22,11 +28,6 @@ import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.TermType; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import 
org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; @@ -48,7 +49,7 @@ class TermToVLogConverter implements TermVisitor { * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. */ @Override - public karmaresearch.vlog.Term visit(AbstractConstant term) { + public karmaresearch.vlog.Term visit(final AbstractConstant term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); } @@ -57,7 +58,7 @@ public karmaresearch.vlog.Term visit(AbstractConstant term) { * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. */ @Override - public karmaresearch.vlog.Term visit(DatatypeConstant term) { + public karmaresearch.vlog.Term visit(final DatatypeConstant term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); } @@ -67,7 +68,7 @@ public karmaresearch.vlog.Term visit(DatatypeConstant term) { * {@link karmaresearch.vlog.Term.TermType#CONSTANT}. 
*/ @Override - public karmaresearch.vlog.Term visit(LanguageStringConstant term) { + public karmaresearch.vlog.Term visit(final LanguageStringConstant term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); } @@ -77,16 +78,12 @@ public karmaresearch.vlog.Term visit(LanguageStringConstant term) { * @param constant * @return VLog constant string */ - public static String getVLogNameForConstant(Constant constant) { + public static String getVLogNameForConstant(final Constant constant) { + final String constantName = constant.getName(); if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - String rulewerkConstantName = constant.getName(); - if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > - return "<" + rulewerkConstantName + ">"; - } else { // keep relative IRIs unchanged - return rulewerkConstantName; - } + return getVLogNameForIRI(constantName); } else { // datatype literal - return constant.getName(); + return constantName; } } @@ -96,7 +93,7 @@ public static String getVLogNameForConstant(Constant constant) { * @param named null * @return VLog constant string */ - public static String getVLogNameForNamedNull(NamedNull namedNull) { + public static String getVLogNameForNamedNull(final NamedNull namedNull) { if (namedNull instanceof RenamedNamedNull) { return namedNull.getName(); } else { @@ -111,10 +108,15 @@ public static String getVLogNameForNamedNull(NamedNull namedNull) { * @param rulewerkConstantName * @return VLog constant string */ - public static String getVLogNameForConstantName(String rulewerkConstantName) { + public static String getVLogNameForConstantName(final String rulewerkConstantName) { if (rulewerkConstantName.startsWith("\"")) { // keep datatype literal strings unchanged return rulewerkConstantName; - } else if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > + } else + return getVLogNameForIRI(rulewerkConstantName); + } + + private static String 
getVLogNameForIRI(final String rulewerkConstantName) { + if (rulewerkConstantName.contains(":")) { // enclose absolute IRIs with < > return "<" + rulewerkConstantName + ">"; } else { // keep relative IRIs unchanged return rulewerkConstantName; @@ -126,7 +128,7 @@ public static String getVLogNameForConstantName(String rulewerkConstantName) { * same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. */ @Override - public karmaresearch.vlog.Term visit(UniversalVariable term) { + public karmaresearch.vlog.Term visit(final UniversalVariable term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); } @@ -135,7 +137,7 @@ public karmaresearch.vlog.Term visit(UniversalVariable term) { * the same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. */ @Override - public karmaresearch.vlog.Term visit(ExistentialVariable term) { + public karmaresearch.vlog.Term visit(final ExistentialVariable term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "!" + term.getName()); } @@ -144,7 +146,7 @@ public karmaresearch.vlog.Term visit(ExistentialVariable term) { * name and type {@link karmaresearch.vlog.Term.TermType#BLANK}. */ @Override - public karmaresearch.vlog.Term visit(NamedNull term) { + public karmaresearch.vlog.Term visit(final NamedNull term) { return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); } From 6a128c9fb194d17424bbaba19403f5355f5eca62 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 21 May 2020 22:03:12 +0200 Subject: [PATCH 0873/1255] update reference to released vlog-java.1.3.3 --- rulewerk-vlog/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index d8438ed7d..716c5e10d 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -17,7 +17,7 @@ Bindings for the VLog reasoner backend. 
- 1.3.3-snapshot + 1.3.3 vlog-java From b0bb1d82b85090060f62897757563ef3e6e1df39 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 21 May 2020 22:56:53 +0200 Subject: [PATCH 0874/1255] update release notes --- RELEASE-NOTES.md | 244 ++++++++++++++++++++++++----------------------- 1 file changed, 124 insertions(+), 120 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 9f52a1d57..8a2d82386 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,120 +1,124 @@ -Rulewerk Release Notes -==================== - -Rulewerk v0.6.0 -------------- - -Breaking changes: -* VLog4j is now called Rulewerk. -* In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no - longer exist. It can be replaced by - `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` -* The `FileDataSource` constructor and those of derived classes now - take the path to a file instead of `File` object. -* The VLog backend has been moved to a new `rulewerk-vlog` module, - changing several import paths. `Reasoner.getInstance()` is - gone. Furthermore, InMemoryDataSource has become an abstract class, - use VLogInMemoryDataSource where applicable. - -New features: -* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` -* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` -* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` -* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` -* Rules files may import other rules files using `@import` and - `@import-relative`, where the latter resolves relative IRIs using - the current base IRI, unless the imported file explicitly specifies - a different one. -* Named nulls of the form `_:name` are now allowed during parsing (but - may not occur in rule bodies). They are renamed to assure that they - are distinct on a per-file level. 
-* The parser allows custom directives to be implemented, and a certain - set of delimiters allows for custom literal expressions. - -Other improvements: -* Prefix declarations are now kept as part of the Knowledge Base and - are used to abbreviate names when exporting inferences. - - -VLog4j v0.5.0 -------------- - -Breaking changes: -* The data model for rules has been refined and changed: - * Instead of Constant, specific types of constants are used to capture abtract and data values - * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification - * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes - * Methods to access terms now use Java Streams and are unified across syntactic objects -* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` - -New features: -* New module vlog4j-client provides a stand-alone command line client jar for VLog4j -* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki -* The parser behaviour for data source declarations and certain datatype literals can be customised. 
- -Other improvements: -* Data model is better aligned with syntax supported by parser -* Java object Statements (rules, facts, datasource declarations) String representation is parseable -* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) -* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) -* Cobertura test coverage tool has been replaced by JaCoCo - -Bugfixes: -* Acyclicity checks work again without calling reason() first (issue #128) -* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) -* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) -* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. - -VLog4j v0.4.0 -------------- - -Breaking changes: -* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) -* The EdbIdbSeparation is obsolete and does no longer exist -* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier -* A new interface Fact has replaced the overly general PositiveLiteral in many places - -New features: -* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java -* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) -* New InMemoryDataSource for efficient in-memory fact loading -* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner -* Modifications to the knowledge base are taken into account by the reasoner -* New and updated example programs to illustrate use of syntax - -Other improvements: -* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) -* Faster and more memory-efficient loading of facts -* Better error reporting; improved use of 
exceptions -* Better logging, especially on the INFO level -* Better code structure and testing - -Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now - - -VLog4j v0.3.0 -------------- - -New features: -* Support for Graal data structures (conversion from Graal model to VLog model objects) -* Stratified negation: rule bodies are conjunctions of positive or negated literals -* SPARQL-based data sources: load remote data from SPARQL endpoints -* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined - -VLog4j v0.2.0 -------------- - -New features: -* supporting File data sources of N-Triples format (.nt file extension) -* supporting g-zipped data source files (.csv.gz, .nt.gz) - -VLog4j v0.1.0 -------------- - -Initial release. - -New features: -* Essential data models for rules and facts, and essential reasoner functionality -* support for reading from RDF files -* support for converting rules from OWL ontology, loaded with the OWL API +Rulewerk Release Notes +==================== + +Rulewerk v0.6.0 +------------- + +Breaking changes: +* VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names + of the project have changed. +* In the examples package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no + longer exist. It can be replaced by + `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` +* The `FileDataSource` constructor and those of child classes (`CsvFileDataSource`, `RdfFileDataSource`) + now take the String path to a file instead of `File` object. +* The VLog backend has been moved to a new `rulewerk-vlog` module, + changing several import paths. `Reasoner.getInstance()` is + gone. Furthermore, `InMemoryDataSource` has become an abstract class, + use `VLogInMemoryDataSource` where applicable. 
+ +New features: +* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` +* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` +* `Reasoner.getCorrectness()` returns the correctness result of the last reasoning task. +* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` +* Rules files may import other rules files using `@import` and + `@import-relative`, where the latter resolves relative IRIs using + the current base IRI, unless the imported file explicitly specifies + a different one. +* Named nulls of the form `_:name` are now allowed during parsing (but + may not occur in rule bodies). They are renamed to assure that they + are distinct on a per-file level. +* The parser allows custom directives to be implemented, and a certain + set of delimiters allows for custom literal expressions. + +Other improvements: +* Prefix declarations are now kept as part of the Knowledge Base and + are used to abbreviate names when exporting inferences. 
+ +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + +VLog4j v0.5.0 +------------- + +Breaking changes: +* The data model for rules has been refined and changed: + * Instead of Constant, specific types of constants are used to capture abtract and data values + * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification + * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes + * Methods to access terms now use Java Streams and are unified across syntactic objects +* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` + +New features: +* New module vlog4j-client provides a stand-alone command line client jar for VLog4j +* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki +* The parser behaviour for data source declarations and certain datatype literals can be customised. + +Other improvements: +* Data model is better aligned with syntax supported by parser +* Java object Statements (rules, facts, datasource declarations) String representation is parseable +* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) +* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) +* Cobertura test coverage tool has been replaced by JaCoCo + +Bugfixes: +* Acyclicity checks work again without calling reason() first (issue #128) +* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) +* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) +* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. 
+ +VLog4j v0.4.0 +------------- + +Breaking changes: +* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) +* The EdbIdbSeparation is obsolete and does no longer exist +* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier +* A new interface Fact has replaced the overly general PositiveLiteral in many places + +New features: +* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java +* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) +* New InMemoryDataSource for efficient in-memory fact loading +* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner +* Modifications to the knowledge base are taken into account by the reasoner +* New and updated example programs to illustrate use of syntax + +Other improvements: +* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) +* Faster and more memory-efficient loading of facts +* Better error reporting; improved use of exceptions +* Better logging, especially on the INFO level +* Better code structure and testing + +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + + +VLog4j v0.3.0 +------------- + +New features: +* Support for Graal data structures (conversion from Graal model to VLog model objects) +* Stratified negation: rule bodies are conjunctions of positive or negated literals +* SPARQL-based data sources: load remote data from SPARQL endpoints +* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined + +VLog4j v0.2.0 +------------- + +New features: +* supporting File data sources of N-Triples format (.nt file extension) +* supporting g-zipped data 
source files (.csv.gz, .nt.gz) + +VLog4j v0.1.0 +------------- + +Initial release. + +New features: +* Essential data models for rules and facts, and essential reasoner functionality +* support for reading from RDF files +* support for converting rules from OWL ontology, loaded with the OWL API From 3e6941f5533a375081325c21950c964529da4274 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 21 May 2020 23:16:09 +0200 Subject: [PATCH 0875/1255] update to release version 0.6.0 --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index c91db4c28..06389c423 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 coverage diff --git a/pom.xml b/pom.xml index 391c9de20..28934543e 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index a3760ec8b..1c820bb25 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-client diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 36a019c2c..ee5836588 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 84344dd75..1a255eea2 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-examples 
diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index f58afbd16..93c167727 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 6e9fab4c3..28e8e7fd8 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index 87da11c8d..c76439ecd 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index efed746b6..4716807f8 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 716c5e10d..c364a64d1 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0-SNAPSHOT + 0.6.0 rulewerk-vlog From c234d6c99bbcc834dc141f97bbd4df55b3430960 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 21 May 2020 23:25:44 +0200 Subject: [PATCH 0876/1255] update README to point to release 0.6.0 --- README.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 11ee4e1fc..47b38c0f1 100644 --- a/README.md +++ b/README.md @@ -9,16 +9,19 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.5.0 and was released as *vlog4j* (all future releases will be published as *rulewerk*). The easiest way of using the library is with Maven. 
Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.6.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` - org.semanticweb.vlog4j - vlog4j-core - 0.5.0 + org.semanticweb.rulewerk + rulewerk-core + 0.6.0 ``` +Previous to version `0.6.0`, *rulewerk* project name was *vlog4j*. Older versions released under name *vlog4j* have `org.semanticweb.vlog4j` and `vlog4j-core`, the latest version being version `0.5.0`. + + You need to use Java 1.8 or above. Available modules include: * **rulewerk-core**: essential data models for rules and facts, and essential reasoner functionality From b90e209ea23324974a491d26ef1a2f7d2ecdb584 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 22 May 2020 01:23:32 +0200 Subject: [PATCH 0877/1255] update to snapshot version --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 06389c423..b65b563fa 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT coverage diff --git a/pom.xml b/pom.xml index 28934543e..47efeb4a6 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 1c820bb25..64f043d72 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-client diff --git a/rulewerk-core/pom.xml 
b/rulewerk-core/pom.xml index ee5836588..34701907a 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 1a255eea2..d4abb343b 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index 93c167727..e5621cfbb 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 28e8e7fd8..d351dd2c3 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index c76439ecd..75487af58 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index 4716807f8..16e796b43 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index c364a64d1..5e767200f 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.6.0 + 0.7.0-SNAPSHOT rulewerk-vlog From 506af489b03821d9059609adf5ffd782e70a78b2 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Fri, 22 May 2020 01:45:10 +0200 Subject: [PATCH 0878/1255] fix javadoc --- .../rulewerk/core/reasoner/KnowledgeBase.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 
deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 7765364c6..ad03ba16e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -195,7 +195,7 @@ public Void visit(final DataSourceDeclaration statement) { /** * Registers a listener for changes on the knowledge base * - * @param listener + * @param listener a KnowledgeBaseListener */ public void addListener(final KnowledgeBaseListener listener) { this.listeners.add(listener); @@ -204,7 +204,7 @@ public void addListener(final KnowledgeBaseListener listener) { /** * Unregisters given listener from changes on the knowledge base * - * @param listener + * @param listener KnowledgeBaseListener */ public void deleteListener(final KnowledgeBaseListener listener) { this.listeners.remove(listener); @@ -478,7 +478,7 @@ public void importRulesFile(File file, AdditionalInputParser parseFunction) throws RulewerkException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); - boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); + boolean isNewFile = this.importedFilePaths.add(file.getCanonicalPath()); Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); try (InputStream stream = new FileInputStream(file)) { @@ -564,22 +564,22 @@ public String unresolveAbsoluteIri(String iri) { * * @param stream the {@link OutputStream} to serialise to. 
* - * @throws IOException + * @throws IOException if an I/O error occurs while writing to given output stream */ public void writeKnowledgeBase(OutputStream stream) throws IOException { stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); - for (DataSourceDeclaration dataSource : getDataSourceDeclarations()) { + for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { stream.write(Serializer.getString(dataSource).getBytes()); stream.write('\n'); } - for (Rule rule : getRules()) { + for (Rule rule : this.getRules()) { stream.write(Serializer.getString(rule).getBytes()); stream.write('\n'); } - for (Fact fact : getFacts()) { + for (Fact fact : this.getFacts()) { stream.write(Serializer.getFactString(fact).getBytes()); stream.write('\n'); } @@ -594,7 +594,7 @@ public void writeKnowledgeBase(OutputStream stream) throws IOException { */ public void writeKnowledgeBase(String filePath) throws IOException { try (OutputStream stream = new FileOutputStream(filePath)) { - writeKnowledgeBase(stream); + this.writeKnowledgeBase(stream); } } } From f79c14cfd4f9e5fdef7e9f14705251e865047a95 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 4 Jun 2020 17:02:23 +0200 Subject: [PATCH 0879/1255] Parser: Allow absolute IRIs in Rule Bodies Fixes #178. 
--- .../rulewerk/parser/javacc/JavaCCParser.jj | 2 +- .../rulewerk/parser/RuleParserTest.java | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 0d633b1ad..7a80fd52c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -474,7 +474,7 @@ MORE : { "@": DIRECTIVE } -< DEFAULT, TERM, DIRECTIVE_ARGUMENTS > MORE : { +< DEFAULT, TERM, DIRECTIVE_ARGUMENTS, BODY > MORE : { "<" { pushState(); } : ABSOLUTE_IRI } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index f3e030e02..271a49598 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -448,6 +448,21 @@ public void predicateAbsoluteIRITest() throws ParsingException { assertEquals(f, f2); } + @Test + public void parse_absoluteIriInRuleHead_succeeds() throws ParsingException { + RuleParser.parseRule("(?x) :- B(?x), C(?x) ."); + } + + @Test + public void parse_absoluteIriInRuleBody_succeeds() throws ParsingException { + RuleParser.parseRule("A(?x) :- B(?x), (?x) ."); + } + + @Test + public void parse_absoluteIrisInRule_succeeds() throws ParsingException { + RuleParser.parseRule("(?x) :- B(?x), (?x) ."); + } + @Test public void testCustomDatatype() throws ParsingException { final String typename = "http://example.org/#test"; From 7fa2ed7d6c6878881b1b25fac57ae29bf6698611 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 10 Jun 2020 17:41:10 +0200 Subject: [PATCH 0880/1255] Improved example code to count only proper triangles 
--- RELEASE-NOTES.md | 11 +++++++++-- .../examples/InMemoryGraphAnalysisExample.java | 10 ++++++---- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 8a2d82386..eb880ff50 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,8 +1,15 @@ Rulewerk Release Notes -==================== +====================== + +Rulewerk v0.7.0 +--------------- + +Other improvements: +* InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where + two or more edges are the same. Rulewerk v0.6.0 -------------- +--------------- Breaking changes: * VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 838ea639e..9bec5600e 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -78,7 +78,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio + "connected(v1) ." // + "connected(?X) :- connected(?Y), biedge(?Y,?X) ." // + "unreachable(?X) :- vertex(?X), ~connected(?X) . " // - + "triangle(?X, ?Y, ?Z) :- biedge(?X,?Y), biedge(?Y, ?Z), biedge(?Z,?X) ."; + + "triangle(?X, ?Y, ?Z) :- biedge(?X,?Y), biedge(?Y, ?Z), biedge(?Z,?X) ." // + + "loop(?X,?X) :- edge(?X,?X) . " // + + "properTriangle(?X, ?Y, ?Z) :- triangle(?X,?Y,?Z), ~loop(?X,?Y), ~loop(?Y, ?Z), ~loop(?Z, ?X) . 
"; final KnowledgeBase kb = RuleParser.parse(rules); kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("vertex", 1), vertices)); @@ -88,14 +90,14 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) + final long unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) .getCount(); - final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + final long triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("properTriangle(?X,?Y,?Z)")) .getCount(); System.out .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); - System.out.println("Number of bi-directional triangles: " + (triangles / 6)); + System.out.println("Number of proper bi-directional triangles: " + (triangles / 6) + " (found in " + triangles + " matches due to symmetry.)"); } } From ae39a8ac83fc0643d67ce15e1ee19e122cc63a54 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 16:17:50 +0200 Subject: [PATCH 0881/1255] Replace VLogQueryResultIterator by faster implementation * New class avoids use of VLog's TermQueryResultIterator * Hence no intermediate string representations of results * Two forms of caching reduce dictionary lookups and number of term objects in RuleWerk --- .../vlog/VLogFastQueryResultIterator.java | 183 ++++++++++++++++++ .../vlog/VLogQueryResultIterator.java | 70 ------- .../rulewerk/reasoner/vlog/VLogReasoner.java | 102 ++++++---- .../reasoner/vlog/VLogToModelConverter.java | 2 +- 4 files changed, 253 insertions(+), 104 deletions(-) create mode 100644 rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java delete mode 100644 
rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java new file mode 100644 index 000000000..00a53bba9 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -0,0 +1,183 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; + +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.VLog; + +/** + * Iterates trough all answers to a query. An answer to a query is a + * {@link QueryResult}. Each query answer is distinct. 
+ * + * @author Markus Kroetzsch + * + */ +public class VLogFastQueryResultIterator implements QueryResultIterator { + + /** + * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used + * here for mapping VLog ids to terms. + * + * @author Markus Kroetzsch + * + * @param + * @param + */ + public static class SimpleLruMap extends LinkedHashMap { + private static final long serialVersionUID = 7151535464938775359L; + private int maxCapacity; + + public SimpleLruMap(int initialCapacity, int maxCapacity) { + super(initialCapacity, 0.75f, true); + this.maxCapacity = maxCapacity; + } + + @Override + protected boolean removeEldestEntry(Map.Entry eldest) { + return size() >= this.maxCapacity; + } + } + + /** + * The internal result iterator of VLog, returning numeric ids only. + */ + private final karmaresearch.vlog.QueryResultIterator vLogQueryResultIterator; + /** + * The VLog instance. Used for resolving numeric ids to term names. + */ + private final VLog vlog; + /** + * VLog ids of the previous tuple, with the last id omitted (since it is not + * useful in caching). + */ + private long[] prevIds = null; + /** + * RuleWerk terms corresponding to the previously fetched tuple, with the last + * term omitted. + */ + private Term[] prevTerms = null; + /** + * True if this is the first result that is returned. + */ + boolean firstResult = true; + /** + * Size of the tuples returned in this result. + */ + int resultSize = -1; + /** + * LRU cache mapping ids to terms. + */ + final SimpleLruMap termCache; + + private final Correctness correctness; + + /** + * Create a new {@link VLogFastQueryResultIterator}. 
+ * + * @param queryResultIterator + * @param materialisationState + * @param vLog + */ + public VLogFastQueryResultIterator(final karmaresearch.vlog.QueryResultIterator queryResultIterator, + final Correctness materialisationState, final VLog vLog) { + this.vLogQueryResultIterator = queryResultIterator; + this.correctness = materialisationState; + this.vlog = vLog; + this.termCache = new SimpleLruMap(256, 16384); + } + + @Override + public boolean hasNext() { + return this.vLogQueryResultIterator.hasNext(); + } + + @Override + public QueryResult next() { + final Term[] terms; + long[] idTuple = vLogQueryResultIterator.next(); + terms = new Term[idTuple.length]; + + if (firstResult) { + resultSize = terms.length; + prevIds = new long[resultSize - 1]; + prevTerms = new Term[resultSize - 1]; + } + + int i = 0; + for (long id : idTuple) { + if (!firstResult && i < resultSize - 1 && prevIds[i] == id) { + terms[i] = prevTerms[i]; + } else { + Term term = this.termCache.get(id); + if (term == null) { + try { + String s = vlog.getConstant(id); + // This internal handling is copied from VLog's code in {@link + // karmaresearch.vlog.TermQueryResultIterator}. + // TODO: the string operation to make null names should possibly be provided by + // VLog rather than being hardcoded here? + if (s == null) { + term = new NamedNullImpl( + "" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); + } else { + term = VLogToModelConverter.toConstant(s); + } + } catch (NotStartedException e) { + // Should not happen, we just did a query ... 
+ throw new RuntimeException(e); + } + this.termCache.put(id, term); + } + terms[i] = term; + if (i < resultSize - 1) { + prevTerms[i] = term; + prevIds[i] = id; + } + } + i++; + } + + firstResult = false; + return new QueryResultImpl(List.of(terms)); + } + + @Override + public void close() { + this.vLogQueryResultIterator.close(); + } + + @Override + public Correctness getCorrectness() { + return this.correctness; + } + +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java deleted file mode 100644 index 35dc7f75c..000000000 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultIterator.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.semanticweb.rulewerk.reasoner.vlog; - -/* - * #%L - * Rulewerk VLog Reasoner Support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.QueryResult; -import org.semanticweb.rulewerk.core.reasoner.Correctness; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; - -import karmaresearch.vlog.Term; -import karmaresearch.vlog.TermQueryResultIterator; - -/** - * Iterates trough all answers to a query. An answer to a query is a - * {@link QueryResult}. Each query answer is distinct. 
- * - * @author Irina Dragoste - * - */ -public class VLogQueryResultIterator implements QueryResultIterator { - - private final TermQueryResultIterator vLogTermQueryResultIterator; - - private final Correctness correctness; - - public VLogQueryResultIterator(final TermQueryResultIterator termQueryResultIterator, - final Correctness materialisationState) { - this.vLogTermQueryResultIterator = termQueryResultIterator; - this.correctness = materialisationState; - } - - @Override - public boolean hasNext() { - return this.vLogTermQueryResultIterator.hasNext(); - } - - @Override - public QueryResult next() { - final Term[] vLogQueryResult = this.vLogTermQueryResultIterator.next(); - return VLogToModelConverter.toQueryResult(vLogQueryResult); - } - - @Override - public void close() { - this.vLogTermQueryResultIterator.close(); - } - - @Override - public Correctness getCorrectness() { - return this.correctness; - } - -} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index ea35c37f8..35c735e73 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -79,7 +79,7 @@ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); final KnowledgeBase knowledgeBase; - final VLog vLog = new VLog(); + public final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; @@ -158,18 +158,18 @@ void load() throws IOException { validateNotClosed(); switch (this.reasonerState) { - case KB_NOT_LOADED: - loadKnowledgeBase(); - break; - case KB_LOADED: - case MATERIALISED: - // do nothing, all KB is already loaded - break; - case KB_CHANGED: - resetReasoner(); 
- loadKnowledgeBase(); - default: - break; + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; } } @@ -328,23 +328,23 @@ public boolean reason() throws IOException { validateNotClosed(); switch (this.reasonerState) { - case KB_NOT_LOADED: - load(); - runChase(); - break; - case KB_LOADED: - runChase(); - break; - case KB_CHANGED: - resetReasoner(); - load(); - runChase(); - break; - case MATERIALISED: - runChase(); - break; - default: - break; + case KB_NOT_LOADED: + load(); + runChase(); + break; + case KB_LOADED: + runChase(); + break; + case KB_CHANGED: + resetReasoner(); + load(); + runChase(); + break; + case MATERIALISED: + runChase(); + break; + default: + break; } return this.reasoningCompleted; @@ -388,9 +388,12 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - TermQueryResultIterator stringQueryResultIterator; + karmaresearch.vlog.QueryResultIterator queryResultIterator; + try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); + final int predicateId = this.vLog.getPredicateId(vLogAtom.getPredicate()); + final long[] terms = extractTerms(vLogAtom.getTerms()); + queryResultIterator = this.vLog.query(predicateId, terms, true, filterBlanks); } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { @@ -400,7 +403,40 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } logWarningOnCorrectness(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + return new VLogFastQueryResultIterator(queryResultIterator, this.correctness, this.vLog); + 
} + + /** + * Utility method copied from {@link karmaresearch.vlog.VLog}. + * + * @FIXME This should be provided by VLog and made visible to us rather than + * being copied here. + * @param terms + * @return + * @throws NotStartedException + */ + private long[] extractTerms(karmaresearch.vlog.Term[] terms) throws NotStartedException { + ArrayList variables = new ArrayList<>(); + long[] longTerms = new long[terms.length]; + for (int i = 0; i < terms.length; i++) { + if (terms[i].getTermType() == karmaresearch.vlog.Term.TermType.VARIABLE) { + boolean found = false; + for (int j = 0; j < variables.size(); j++) { + if (variables.get(j).equals(terms[i].getName())) { + found = true; + longTerms[i] = -j - 1; + break; + } + } + if (!found) { + variables.add(terms[i].getName()); + longTerms[i] = -variables.size(); + } + } else { + longTerms[i] = this.vLog.getOrAddConstantId(terms[i].getName()); + } + } + return longTerms; } @Override diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index c68d68234..f313fdefd 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -101,7 +101,7 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { * @param vLogConstantName the string name used by VLog * @return {@link Constant} object */ - private static Constant toConstant(String vLogConstantName) { + static Constant toConstant(String vLogConstantName) { final Constant constant; if (vLogConstantName.charAt(0) == Serializer.LESS_THAN && vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { From 5ed54877f04b12ce225b783a0d69cb2b7358f400 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 16:19:28 +0200 Subject: [PATCH 0882/1255] Mention improvement 
--- RELEASE-NOTES.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index eb880ff50..79f9edb8a 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -4,6 +4,9 @@ Rulewerk Release Notes Rulewerk v0.7.0 --------------- +New features: +* Significant speedup in iterating over query results + Other improvements: * InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where two or more edges are the same. From 5412efcba88e50b52757b4ee203835074c973b36 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 18:11:03 +0200 Subject: [PATCH 0883/1255] Arrays.asList seems faster than List.of --- .../reasoner/vlog/VLogFastQueryResultIterator.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 00a53bba9..875ad3b43 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -1,7 +1,7 @@ package org.semanticweb.rulewerk.reasoner.vlog; +import java.util.Arrays; import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; /* @@ -42,7 +42,7 @@ * */ public class VLogFastQueryResultIterator implements QueryResultIterator { - + /** * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used * here for mapping VLog ids to terms. @@ -74,7 +74,7 @@ protected boolean removeEldestEntry(Map.Entry eldest) { /** * The VLog instance. Used for resolving numeric ids to term names. */ - private final VLog vlog; + private final VLog vLog; /** * VLog ids of the previous tuple, with the last id omitted (since it is not * useful in caching). 
@@ -111,7 +111,7 @@ public VLogFastQueryResultIterator(final karmaresearch.vlog.QueryResultIterator final Correctness materialisationState, final VLog vLog) { this.vLogQueryResultIterator = queryResultIterator; this.correctness = materialisationState; - this.vlog = vLog; + this.vLog = vLog; this.termCache = new SimpleLruMap(256, 16384); } @@ -140,7 +140,7 @@ public QueryResult next() { Term term = this.termCache.get(id); if (term == null) { try { - String s = vlog.getConstant(id); + String s = vLog.getConstant(id); // This internal handling is copied from VLog's code in {@link // karmaresearch.vlog.TermQueryResultIterator}. // TODO: the string operation to make null names should possibly be provided by @@ -167,7 +167,7 @@ public QueryResult next() { } firstResult = false; - return new QueryResultImpl(List.of(terms)); + return new QueryResultImpl(Arrays.asList(terms)); } @Override From 39df1cc3a25d0b95f758a34d61b3133687e450a6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 20:52:00 +0200 Subject: [PATCH 0884/1255] Disable caching for unary queries --- .../vlog/VLogFastQueryResultIterator.java | 45 ++++++++++++------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 875ad3b43..c42a7c9d0 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -42,7 +42,7 @@ * */ public class VLogFastQueryResultIterator implements QueryResultIterator { - + /** * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used * here for mapping VLog ids to terms. 
@@ -136,25 +136,12 @@ public QueryResult next() { for (long id : idTuple) { if (!firstResult && i < resultSize - 1 && prevIds[i] == id) { terms[i] = prevTerms[i]; + } else if (resultSize == 1) { // caching pointless for unary queries + terms[i] = computeTerm(id); } else { Term term = this.termCache.get(id); if (term == null) { - try { - String s = vLog.getConstant(id); - // This internal handling is copied from VLog's code in {@link - // karmaresearch.vlog.TermQueryResultIterator}. - // TODO: the string operation to make null names should possibly be provided by - // VLog rather than being hardcoded here? - if (s == null) { - term = new NamedNullImpl( - "" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); - } else { - term = VLogToModelConverter.toConstant(s); - } - } catch (NotStartedException e) { - // Should not happen, we just did a query ... - throw new RuntimeException(e); - } + term = computeTerm(id); this.termCache.put(id, term); } terms[i] = term; @@ -170,6 +157,30 @@ public QueryResult next() { return new QueryResultImpl(Arrays.asList(terms)); } + /** + * Compute the {@link Term} for a given VLog id. + * + * @param id + * @return + */ + Term computeTerm(long id) { + try { + String s = vLog.getConstant(id); + // This internal handling is copied from VLog's code in {@link + // karmaresearch.vlog.TermQueryResultIterator}. + // TODO: the string operation to make null names should possibly be provided by + // VLog rather than being hardcoded here? + if (s == null) { + return new NamedNullImpl("" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); + } else { + return VLogToModelConverter.toConstant(s); + } + } catch (NotStartedException e) { + // Should not happen, we just did a query ... 
+ throw new RuntimeException(e); + } + } + @Override public void close() { this.vLogQueryResultIterator.close(); From 222e994e4cc067b3e9954093f1ed16ae2587a93a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 21:45:41 +0200 Subject: [PATCH 0885/1255] Optimise caching strategy for ordered data --- .../vlog/VLogFastQueryResultIterator.java | 76 ++++++++++++++----- 1 file changed, 58 insertions(+), 18 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index c42a7c9d0..2a72d738f 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -1,8 +1,7 @@ package org.semanticweb.rulewerk.reasoner.vlog; import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.Map; +import java.util.HashMap; /* * #%L @@ -43,27 +42,66 @@ */ public class VLogFastQueryResultIterator implements QueryResultIterator { +// /** +// * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used +// * here for mapping VLog ids to terms. +// * +// * @author Markus Kroetzsch +// * +// * @param +// * @param +// */ +// static class SimpleLruMap extends LinkedHashMap { +// private static final long serialVersionUID = 7151535464938775359L; +// private int maxCapacity; +// +// public SimpleLruMap(int initialCapacity, int maxCapacity) { +// super(initialCapacity, 0.75f, true); +// this.maxCapacity = maxCapacity; +// } +// +// @Override +// protected boolean removeEldestEntry(Map.Entry eldest) { +// return size() >= this.maxCapacity; +// } +// } + /** - * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used - * here for mapping VLog ids to terms. 
+ * Simple cache for finding terms for VLog ids that is optimised for the case + * where ids are inserted in a mostly ordered fashion. An LRU strategy is highly + * ineffective for this as soon as the cache capacity is smaller than the number + * of repeatedly used terms, since the cache entries there are always pushed out + * before being needed again. This implementation will at least cache a maximal + * initial fragment in such cases. It is also faster to write and requires less + * memory. * * @author Markus Kroetzsch * - * @param - * @param */ - public static class SimpleLruMap extends LinkedHashMap { - private static final long serialVersionUID = 7151535464938775359L; - private int maxCapacity; + static class OrderedTermCache { + final private HashMap terms = new HashMap<>(); + final int maxCapacity; + private long maxId = -1; - public SimpleLruMap(int initialCapacity, int maxCapacity) { - super(initialCapacity, 0.75f, true); - this.maxCapacity = maxCapacity; + public OrderedTermCache(int capacity) { + this.maxCapacity = capacity; } - @Override - protected boolean removeEldestEntry(Map.Entry eldest) { - return size() >= this.maxCapacity; + public Term get(long id) { + if (id > maxId) { + return null; + } else { + return terms.get(id); + } + } + + public void put(long id, Term term) { + if (terms.size() < maxCapacity) { + terms.put(id, term); + if (id > maxId) { + maxId = id; + } + } } } @@ -94,9 +132,10 @@ protected boolean removeEldestEntry(Map.Entry eldest) { */ int resultSize = -1; /** - * LRU cache mapping ids to terms. + * Cache mapping ids to terms. 
*/ - final SimpleLruMap termCache; + // final SimpleLruMap termCache; + final OrderedTermCache termCache; private final Correctness correctness; @@ -112,7 +151,8 @@ public VLogFastQueryResultIterator(final karmaresearch.vlog.QueryResultIterator this.vLogQueryResultIterator = queryResultIterator; this.correctness = materialisationState; this.vLog = vLog; - this.termCache = new SimpleLruMap(256, 16384); + // this.termCache = new SimpleLruMap(256, 64000); + this.termCache = new OrderedTermCache(130000); } @Override From f548de8dcfcd66834af19512ddd949090bb0bb7b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 17 Jun 2020 22:04:41 +0200 Subject: [PATCH 0886/1255] vLog should not be public --- .../org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 35c735e73..8f17d699f 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -79,7 +79,7 @@ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); final KnowledgeBase knowledgeBase; - public final VLog vLog = new VLog(); + final VLog vLog = new VLog(); private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; From f491cf051542860ae25156e7c4a91e09573cd044 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 09:09:59 +0200 Subject: [PATCH 0887/1255] Change exception type as requested --- .../reasoner/vlog/VLogFastQueryResultIterator.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git 
a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 2a72d738f..b3978303a 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -3,6 +3,8 @@ import java.util.Arrays; import java.util.HashMap; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + /* * #%L * Rulewerk VLog Reasoner Support @@ -215,9 +217,8 @@ Term computeTerm(long id) { } else { return VLogToModelConverter.toConstant(s); } - } catch (NotStartedException e) { - // Should not happen, we just did a query ... - throw new RuntimeException(e); + } catch (NotStartedException e) { // Should never happen, we just did a query ... + throw new RulewerkRuntimeException(e); } } From f782f7dce215c91af47dc6f593a6d0c542421e9f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 18:17:59 +0200 Subject: [PATCH 0888/1255] Even faster implementation --- .../vlog/VLogFastQueryResultIterator.java | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index b3978303a..6f0696987 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -2,6 +2,7 @@ import java.util.Arrays; import java.util.HashMap; +import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; @@ -116,15 +117,10 @@ public void put(long id, Term term) { */ private final VLog vLog; 
/** - * VLog ids of the previous tuple, with the last id omitted (since it is not + * VLog ids of the previous tuple, with the last id fixed to -1 (since it is never * useful in caching). */ private long[] prevIds = null; - /** - * RuleWerk terms corresponding to the previously fetched tuple, with the last - * term omitted. - */ - private Term[] prevTerms = null; /** * True if this is the first result that is returned. */ @@ -133,6 +129,10 @@ public void put(long id, Term term) { * Size of the tuples returned in this result. */ int resultSize = -1; + /** + * Previous tuple that was returned. + */ + Term[] prevTuple; /** * Cache mapping ids to terms. */ @@ -164,23 +164,25 @@ public boolean hasNext() { @Override public QueryResult next() { - final Term[] terms; - long[] idTuple = vLogQueryResultIterator.next(); - terms = new Term[idTuple.length]; + final long[] idTuple = vLogQueryResultIterator.next(); if (firstResult) { - resultSize = terms.length; - prevIds = new long[resultSize - 1]; - prevTerms = new Term[resultSize - 1]; + resultSize = idTuple.length; + prevTuple = new Term[resultSize]; + prevIds = new long[resultSize]; + Arrays.fill(prevIds, -1); // (practically) impossible id + firstResult = false; } + if (resultSize == 1) { // Caching is pointless for unary queries + return new QueryResultImpl(List.of(computeTerm(idTuple[0]))); + } + + // (Array.copyOf was slightly faster than System.arraycopy in tests) + final Term[] terms = Arrays.copyOf(prevTuple, resultSize); int i = 0; for (long id : idTuple) { - if (!firstResult && i < resultSize - 1 && prevIds[i] == id) { - terms[i] = prevTerms[i]; - } else if (resultSize == 1) { // caching pointless for unary queries - terms[i] = computeTerm(id); - } else { + if (prevIds[i] != id) { Term term = this.termCache.get(id); if (term == null) { term = computeTerm(id); @@ -188,14 +190,13 @@ public QueryResult next() { } terms[i] = term; if (i < resultSize - 1) { - prevTerms[i] = term; prevIds[i] = id; } } i++; } - firstResult 
= false; + prevTuple = terms; return new QueryResultImpl(Arrays.asList(terms)); } @@ -208,12 +209,11 @@ public QueryResult next() { Term computeTerm(long id) { try { String s = vLog.getConstant(id); - // This internal handling is copied from VLog's code in {@link - // karmaresearch.vlog.TermQueryResultIterator}. - // TODO: the string operation to make null names should possibly be provided by - // VLog rather than being hardcoded here? if (s == null) { - return new NamedNullImpl("" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); + // This string operation extracts the internal rule number (val >> 40), + // the internal variable number ((val >> 32) & 0377), and + // a counter (val & 0xffffffffL) + return new NamedNullImpl("null" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); } else { return VLogToModelConverter.toConstant(s); } From 166bcb9ae27507bfbf72595a45e35b27b4992136 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 21:53:15 +0200 Subject: [PATCH 0889/1255] Restore Java 8 compatibility --- .../rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 6f0696987..518f5d71a 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -1,6 +1,7 @@ package org.semanticweb.rulewerk.reasoner.vlog; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -175,7 +176,7 @@ public QueryResult next() { } if (resultSize == 1) { // Caching is pointless for unary queries - return new QueryResultImpl(List.of(computeTerm(idTuple[0]))); + 
return new QueryResultImpl(Collections.singletonList(computeTerm(idTuple[0]))); } // (Array.copyOf was slightly faster than System.arraycopy in tests) From 44c97816d28c8f0e680447b364e2f6ec3ff80f49 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 18 Jun 2020 22:17:19 +0200 Subject: [PATCH 0890/1255] Remove unnecessary import --- .../rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 518f5d71a..17acf658e 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -3,7 +3,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; From a60919acf7c4e3fb16a4e199f411f8134e17824f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 14:45:29 +0200 Subject: [PATCH 0891/1255] Support extraction of Java types from constants --- .../model/api/PrefixDeclarationRegistry.java | 4 + .../rulewerk/core/model/api/Terms.java | 89 +++++++++++++++++++ 2 files changed, 93 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index cde555c76..49cc7abe3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -38,6 +38,10 @@ public interface PrefixDeclarationRegistry extends Iterable 
getDatatypeConstants(Stream terms) return terms.filter(term -> term.getType() == TermType.DATATYPE_CONSTANT).map(DatatypeConstant.class::cast); } + /** + * Returns the lexical value of a term that is an xsd:string constant, and + * throws an exception for all other cases. + * + * @param term the term from which the string is to be extracted + * @return extracted string + * @throws IllegalArgumentException if the given term is not a constant of type + * xsd:string + */ + public static String extractString(Term term) { + if (term.getType() == TermType.DATATYPE_CONSTANT) { + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + if (PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) + return datatypeConstant.getLexicalValue(); + } + throw new IllegalArgumentException( + "Term " + term.toString() + " is not a datatype constant of type xsd:string."); + } + + /** + * Returns the IRI representation of an abstract term, and throws an exception + * for all other cases. + * + * @param term the term from which the IRI is to be extracted + * @return extracted IRI + * @throws IllegalArgumentException if the given term is not an abstract + * constant or cannot be parsed as an IRI + */ + public static URI extractIri(Term term) { + if (term.getType() == TermType.ABSTRACT_CONSTANT) { + try { + return new URI(term.getName()); + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); + } + } + throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); + } + + /** + * Returns the URL representation of an abstract term, and throws an exception + * for all other cases. 
+ * + * @param term the term from which the URL is to be extracted + * @return extracted URL + * @throws IllegalArgumentException if the given term is not an abstract + * constant or cannot be parsed as a URL + */ + public static URL extractUrl(Term term) { + if (term.getType() == TermType.ABSTRACT_CONSTANT) { + try { + return new URL(term.getName()); + } catch (MalformedURLException e) { + throw new IllegalArgumentException(e); + } + } + throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); + } + + /** + * Returns the numeric value of a term that is an xsd:integer (or supported + * subtype) constant, and throws an exception for all other cases. + * + * @param term the term from which the integer is to be extracted + * @return extracted integer + * @throws IllegalArgumentException if the given term is not a constant of an + * integer type, or if the lexical + * representation could not be parsed into a + * Java int + */ + public static int extractInt(Term term) { + if (term.getType() == TermType.DATATYPE_CONSTANT) { + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_LONG.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_INT.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_SHORT.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_BYTE.equals(datatypeConstant.getDatatype())) + return Integer.parseInt(datatypeConstant.getLexicalValue()); + } + throw new IllegalArgumentException( + "Term " + term.toString() + " is not a datatype constant of a supported integer type."); + } + } From 6dc399f2d7e1724db2d412e3aab4df018c102ec6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 14:45:51 +0200 Subject: [PATCH 0892/1255] Improve names of correctness constants --- .../org/semanticweb/rulewerk/core/reasoner/Correctness.java | 
4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java index 264616546..e2a00152c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java @@ -33,13 +33,13 @@ public enum Correctness { * Completeness is not guaranteed, but soundness is. For example, query * answering yields sound, but possibly incomplete answers. */ - SOUND_BUT_INCOMPLETE("sound but incomplete"), + SOUND_BUT_INCOMPLETE("sound but possibly incomplete"), /** * Soundness is not guaranteed. For example, query answering may give incorrect * (unsound and incomplete) answers. */ - INCORRECT("incorrect"), + INCORRECT("possibly incorrect"), /** * Correctness is guaranteed. For example, query answering yealds are correct From 080f7ab29e5dcdc2af2fa925944a05f0a69510ee Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 14:51:04 +0200 Subject: [PATCH 0893/1255] Structural changes in Parser - Support - and _ in predicate names - Support Rule and PositiveLiteral arguments to (custom) directives - Use PositiveLiteral to capture source declarations; update all handlers to use this - Do not allow data source handlers to have side effects on the KB or parser --- .../rulewerk/parser/DirectiveArgument.java | 140 ++++++++++++++++-- .../rulewerk/parser/ParserConfiguration.java | 36 ++--- .../CsvFileDataSourceDeclarationHandler.java | 12 +- .../RdfFileDataSourceDeclarationHandler.java | 12 +- ...eryResultDataSourceDeclarationHandler.java | 19 +-- .../rulewerk/parser/javacc/JavaCCParser.jj | 36 +++-- .../parser/javacc/JavaCCParserBase.java | 7 +- .../parser/ParserConfigurationTest.java | 2 +- .../parser/RuleParserDataSourceTest.java | 54 +------ 9 files changed, 196 insertions(+), 122 deletions(-) diff 
--git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java index c31270d35..dd47cdffa 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java @@ -24,6 +24,8 @@ import java.util.Optional; import java.util.function.Function; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; /** @@ -39,14 +41,18 @@ private DirectiveArgument() { /** * Apply a function to the contained value. * - * @param stringHandler the function to apply to a string argument - * @param iriHandler the function to apply to an IRI - * @param termHandler the function to apply to a Term + * @param stringHandler the function to apply to a string argument + * @param iriHandler the function to apply to an IRI + * @param termHandler the function to apply to a Term + * @param ruleHandler the function to apply to a Rule + * @param positiveLiteralHandler the function to apply to a Literal * * @return the value returned by the appropriate handler function */ public abstract V apply(Function stringHandler, - Function iriHandler, Function termHandler); + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler); /** * Partially compare two arguments, without comparing the actual values. 
@@ -85,7 +91,9 @@ public static DirectiveArgument string(String value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { return stringHandler.apply(value); } @@ -98,7 +106,8 @@ public boolean equals(Object other) { } DirectiveArgument otherArgument = (DirectiveArgument) other; - return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false); + return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false, rule -> false, + positiveLiteral -> false); } @Override @@ -119,7 +128,9 @@ public static DirectiveArgument iri(URI value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { return iriHandler.apply(value); } @@ -132,7 +143,8 @@ public boolean equals(Object other) { } DirectiveArgument otherArgument = (DirectiveArgument) other; - return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false); + return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false, rule -> false, + positiveLiteral -> false); } @Override @@ -153,7 +165,9 @@ public static DirectiveArgument term(Term value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { return termHandler.apply(value); } @@ -166,7 +180,8 @@ public boolean equals(Object other) { } DirectiveArgument otherArgument = (DirectiveArgument) other; - return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value)); + return otherArgument.apply(str -> false, iri -> false, term -> 
term.equals(value), rule -> false, + positiveLiteral -> false); } @Override @@ -176,6 +191,80 @@ public int hashCode() { }; } + /** + * Create an argument containing a Rule. + * + * @param value the Rule value + * + * @return An argument containing the given Rule value + */ + public static DirectiveArgument rule(Rule value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { + return ruleHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> rule.equals(value), + positiveLiteral -> false); + } + + @Override + public int hashCode() { + return 53 * value.hashCode(); + } + }; + } + + /** + * Create an argument containing a PositiveLiteral. 
+ * + * @param value the PositiveLiteral value + * + * @return An argument containing the given PositiveLiteral value + */ + public static DirectiveArgument positiveLiteral(PositiveLiteral value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler, + Function ruleHandler, + Function positiveLiteralHandler) { + return positiveLiteralHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> false, + positiveLiteral -> positiveLiteral.equals(value)); + } + + @Override + public int hashCode() { + return 59 * value.hashCode(); + } + }; + } + /** * Create an optional from a (possible) string value. * @@ -183,7 +272,8 @@ public int hashCode() { * the argument doesn't contain a string. */ public Optional fromString() { - return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty()); + return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), + value -> Optional.empty()); } /** @@ -193,7 +283,8 @@ public Optional fromString() { * argument doesn't contain a IRI. */ public Optional fromIri() { - return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); + return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty(), value -> Optional.empty(), + value -> Optional.empty()); } /** @@ -203,6 +294,29 @@ public Optional fromIri() { * the argument doesn't contain a Term. 
*/ public Optional fromTerm() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of); + return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of, value -> Optional.empty(), + value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) Rule value. + * + * @return An optional containing the contained Rule, or an empty Optional if + * the argument doesn't contain a Rule. + */ + public Optional fromRule() { + return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), Optional::of, + value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) PositiveLiteral value. + * + * @return An optional containing the contained PositiveLiteral, or an empty + * Optional if the argument doesn't contain a PositiveLitreal. + */ + public Optional fromPositiveLiteral() { + return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), + value -> Optional.empty(), Optional::of); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index f6bc3b9d3..63d788f01 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -28,11 +28,13 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import 
org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** @@ -78,7 +80,8 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see the grammar + * @see + * the grammar * * @param name Name of the data source, as it appears in the declaring * directive. @@ -96,30 +99,28 @@ public ParserConfiguration registerDataSource(final String name, final DataSourc } /** - * Parse the source-specific part of a Data Source declaration. + * Parse the source-specific part of a data source declaration. * * This is called by the parser to construct a {@link DataSourceDeclaration}. It * is responsible for instantiating an appropriate {@link DataSource} type. * - * @param name Name of the data source. - * @param args arguments given in the data source declaration. - * @param subParserFactory a {@link SubParserFactory} instance that creates - * parser with the same context as the current parser. + * @param declaration literal that specifies the type and parameters for this + * data source declarations * - * @throws ParsingException when the declaration is invalid, e.g., if the Data - * Source is not known. + * @throws ParsingException when the declaration is invalid, e.g., if the data + * source is not known. * - * @return the Data Source instance. + * @return the data source instance. 
*/ - public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, - final List args, final SubParserFactory subParserFactory) throws ParsingException { - final DataSourceDeclarationHandler handler = this.dataSources.get(name); + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLiteral declaration) + throws ParsingException { + final DataSourceDeclarationHandler handler = this.dataSources.get(declaration.getPredicate().getName()); if (handler == null) { - throw new ParsingException("Data source \"" + name + "\" is not known."); + throw new ParsingException("Data source \"" + declaration.getPredicate().getName() + "\" is not known."); } - return handler.handleDirective(args, subParserFactory); + return handler.handleDataSourceDeclaration(declaration.getArguments()); } /** @@ -286,7 +287,8 @@ public ParserConfiguration allowNamedNulls() { } /** - * Disallow parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull}. + * Disallow parsing of + * {@link org.semanticweb.rulewerk.core.model.api.NamedNull}. * * @return this */ @@ -295,8 +297,8 @@ public ParserConfiguration disallowNamedNulls() { } /** - * Whether parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull} is - * allowed. + * Whether parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull} + * is allowed. * * @return true iff parsing of NamedNulls is allowed. 
*/ diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 67a66c9c4..185f073ca 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -24,12 +24,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; -import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing {@link CsvFileDataSource} declarations @@ -38,10 +35,9 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException { - DirectiveHandler.validateNumberOfArguments(arguments, 1); - String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); + String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "CSV file name"); try { return new CsvFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 259c26759..36a3738c4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -24,12 +24,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; -import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing {@link RdfFileDataSource} declarations @@ -38,10 +35,9 @@ */ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException { - DirectiveHandler.validateNumberOfArguments(arguments, 1); - String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); + String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "RDF file name"); try { return new RdfFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index e91632ac9..3a405d83c 
100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -24,12 +24,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; -import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing {@link SparqlQueryResultDataSource} declarations @@ -37,13 +34,13 @@ * @author Maximilian Marx */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { - @Override - public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException { - DirectiveHandler.validateNumberOfArguments(arguments, 3); - URL endpoint = DirectiveHandler.validateUrlArgument(arguments.get(0), "SPARQL endpoint"); - String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); - String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); + + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 3); + URL endpoint = DataSourceDeclarationHandler.validateUrlArgument(terms.get(0), "SPARQL endpoint URL"); + String variables = DataSourceDeclarationHandler.validateStringArgument(terms.get(1), + "SPARQL query variable list"); + String query = DataSourceDeclarationHandler.validateStringArgument(terms.get(2), "SPARQL 
query pattern"); return new SparqlQueryResultDataSource(endpoint, variables, query); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 7a80fd52c..e932e160e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -34,17 +34,14 @@ options PARSER_BEGIN(JavaCCParser) package org.semanticweb.rulewerk.parser.javacc; -import java.io.File; -import java.io.InputStream; -import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.List; -import java.util.Deque; import java.util.ArrayList; -import java.util.ArrayDeque; import java.util.LinkedList; +import java.util.ArrayDeque; +import java.util.Deque; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.DirectiveArgument; @@ -62,9 +59,6 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -147,12 +141,11 @@ void source() throws PrefixDeclarationException : { } DataSource dataSource() throws PrefixDeclarationException : { - Token sourceName; + PositiveLiteral positiveLiteral; List< DirectiveArgument > arguments; } { - ( sourceName = < ARGUMENT_NAME > - | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { - return 
parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); + positiveLiteral = positiveLiteral(FormulaContext.HEAD) { + return parseDataSourceSpecificPartOfDataSourceDeclaration(positiveLiteral); } } @@ -165,6 +158,16 @@ KnowledgeBase directive() throws PrefixDeclarationException : { } } +/*TODO List< DirectiveArgument > command() throws PrefixDeclarationException : { + Token name; + List< DirectiveArgument > arguments; +} { + name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { + arguments.add(0, DirectiveArgument.string(name.image)); + return arguments; + } +}*/ + void statement() throws PrefixDeclarationException : { Statement statement; KnowledgeBase knowledgeBase; @@ -432,10 +435,14 @@ String String() : { LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : { DirectiveArgument argument; String str; + Rule rule; + PositiveLiteral positiveLiteral; Term t; LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); } { - ( LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } + ( LOOKAHEAD(rule()) rule = rule() { argument = DirectiveArgument.rule(rule); } + | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = positiveLiteral(FormulaContext.HEAD) { argument = DirectiveArgument.positiveLiteral(positiveLiteral); } + | LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } | LOOKAHEAD(absoluteIri()) str = absoluteIri() { URI url; try { @@ -479,9 +486,10 @@ MORE : { } < DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS > TOKEN : { - < VARORPREDNAME : < A2Z> (< A2ZN >)* > + < VARORPREDNAME : < A2Z> (< A2ZND >)* > | < #A2Z : [ "a"-"z", "A"-"Z" ] > | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > + | < #A2ZND : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > | < PNAME_LN : (< PN_PREFIX >)? 
":" < PN_LOCAL > > | < PNAME_NS : < PN_PREFIX > ":" > | < #PN_CHARS_BASE : [ "a"-"z", "A"-"Z", "\u00c0"-"\u00d6", diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 54126cd0f..8fd4e8c84 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -28,6 +28,7 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Statement; @@ -330,11 +331,9 @@ public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { return this.prefixDeclarationRegistry; } - DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, - List arguments, SubParserFactory subParserFactory) throws ParseException { + DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLiteral declaration) throws ParseException { try { - return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(syntacticForm, arguments, - subParserFactory); + return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(declaration); } catch (ParsingException e) { throw makeParseExceptionWithCause( "Failed while trying to parse the source-specific part of a data source declaration", e); diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java index 
cae03b2c7..c9d513f6e 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java @@ -28,9 +28,9 @@ import org.junit.Test; import org.mockito.Mock; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.DatatypeConstantHandler; import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index e72ae9dfb..1a376bc3e 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -31,21 +31,20 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; -import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import 
org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.parser.DataSourceDeclarationHandler; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; public class RuleParserDataSourceTest { private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; @@ -140,16 +139,15 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDirective(ArgumentMatchers.>any(), - ArgumentMatchers.any()); + doReturn(source).when(handler).handleDataSourceDeclaration(ArgumentMatchers.>any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - List expectedArguments = Arrays.asList(DirectiveArgument.string("hello"), - DirectiveArgument.string("world")); + List expectedArguments = Arrays.asList( + Expressions.makeDatatypeConstant("hello", PrefixDeclarationRegistry.XSD_STRING), + Expressions.makeDatatypeConstant("world", PrefixDeclarationRegistry.XSD_STRING)); RuleParser.parse(input, parserConfiguration); - verify(handler).handleDirective(ArgumentMatchers.eq(expectedArguments), - ArgumentMatchers.any()); + verify(handler).handleDataSourceDeclaration(ArgumentMatchers.eq(expectedArguments)); } @Test @@ -195,40 +193,4 @@ public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws Pa RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } - class 
DuplicatingDataSourceDeclarationHandler implements DataSourceDeclarationHandler { - public DataSource handleDirective(List arguments, SubParserFactory subParserFactory) - throws ParsingException { - CsvFileDataSource source; - try { - source = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); - } catch (IOException e) { - throw new ParsingException(e); - } - - KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); - ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); - RuleParser.parseInto(knowledgeBase, "@source q[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") .", - parserConfiguration); - - return source; - } - } - - @Test - public void parseInto_mockDataSourceWithBase_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); - String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - KnowledgeBase knowledgeBase = new KnowledgeBase(); - RuleParser.parseInto(knowledgeBase, input, parserConfiguration, "https://example.org"); - assertEquals(2, knowledgeBase.getStatements().size()); - } - - @Test(expected = ParsingException.class) - public void parseDataSourceDeclaration_unexpectedlyAddsTwoDatasources_throws() throws ParsingException { - ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); - String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - RuleParser.parseDataSourceDeclaration(input, parserConfiguration); - } } From 3510326c0961c2adc15a002e807af98fc8623544 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:14:00 +0200 Subject: [PATCH 0894/1255] Support commands in parser - Add Argument (formerly DirectiveArgument) and Command to core api - Support parsing commands with zero of more arguments 
--- .../rulewerk/core/model/api/Argument.java | 44 ++++----- .../rulewerk/core/model/api/Command.java | 69 +++++++++++++ .../rulewerk/core/model/ArgumentTest.java | 23 ++--- .../parser/DataSourceDeclarationHandler.java | 32 ------- .../rulewerk/parser/DirectiveHandler.java | 15 +-- .../rulewerk/parser/ParserConfiguration.java | 3 +- .../rulewerk/parser/RuleParser.java | 10 ++ .../DataSourceDeclarationHandler.java | 96 +++++++++++++++++++ .../ImportFileDirectiveHandler.java | 4 +- .../ImportFileRelativeDirectiveHandler.java | 4 +- .../rulewerk/parser/javacc/JavaCCParser.jj | 61 +++++++----- .../parser/javacc/JavaCCParserBase.java | 4 +- .../rulewerk/parser/CommandParserTest.java | 23 +++++ .../rulewerk/parser/DirectiveHandlerTest.java | 11 ++- 14 files changed, 289 insertions(+), 110 deletions(-) rename rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java => rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java (88%) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java rename rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java => rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java (73%) delete mode 100644 rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java create mode 100644 rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java create mode 100644 rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java similarity index 88% rename from rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java rename to 
rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index dd47cdffa..7fe488150 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.parser; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -24,18 +24,14 @@ import java.util.Optional; import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; - /** * A tagged union representing the possible types allowed to appear as arguments - * in directives. + * in commands and parser directives. * * @author Maximilian Marx */ -public abstract class DirectiveArgument { - private DirectiveArgument() { +public abstract class Argument { + private Argument() { } /** @@ -73,7 +69,7 @@ protected Optional isEqual(Object other) { return Optional.of(true); } - if (!(other instanceof DirectiveArgument)) { + if (!(other instanceof Argument)) { return Optional.of(false); } @@ -87,8 +83,8 @@ protected Optional isEqual(Object other) { * * @return An argument containing the given string value */ - public static DirectiveArgument string(String value) { - return new DirectiveArgument() { + public static Argument string(String value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -105,7 +101,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false, rule -> false, positiveLiteral -> false); } @@ -124,8 +120,8 @@ public int hashCode() { * * @return An argument containing the given IRI value */ - public 
static DirectiveArgument iri(URI value) { - return new DirectiveArgument() { + public static Argument iri(URI value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -142,7 +138,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false, rule -> false, positiveLiteral -> false); } @@ -161,8 +157,8 @@ public int hashCode() { * * @return An argument containing the given Term value */ - public static DirectiveArgument term(Term value) { - return new DirectiveArgument() { + public static Argument term(Term value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -179,7 +175,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value), rule -> false, positiveLiteral -> false); } @@ -198,8 +194,8 @@ public int hashCode() { * * @return An argument containing the given Rule value */ - public static DirectiveArgument rule(Rule value) { - return new DirectiveArgument() { + public static Argument rule(Rule value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -216,7 +212,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> rule.equals(value), positiveLiteral -> false); } @@ -235,8 +231,8 @@ public int hashCode() { * * @return An argument containing the given 
PositiveLiteral value */ - public static DirectiveArgument positiveLiteral(PositiveLiteral value) { - return new DirectiveArgument() { + public static Argument positiveLiteral(PositiveLiteral value) { + return new Argument() { @Override public V apply(Function stringHandler, Function iriHandler, Function termHandler, @@ -253,7 +249,7 @@ public boolean equals(Object other) { return maybeEquals.get(); } - DirectiveArgument otherArgument = (DirectiveArgument) other; + Argument otherArgument = (Argument) other; return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> false, positiveLiteral -> positiveLiteral.equals(value)); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java new file mode 100644 index 000000000..855652158 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -0,0 +1,69 @@ +package org.semanticweb.rulewerk.core.model.api; + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +/** + * Class for representing a generic command that can be executed. + * + * @author Markus Kroetzsch + * + */ +public class Command implements Entity { + + final String name; + final List arguments; + + /** + * Constructor + * + * @param name String name of the command + * @param arguments list of arguments of the command + */ + public Command(String name, List arguments) { + this.name = name; + this.arguments = arguments; + } + + /** + * Returns the command name. + * + * @return + */ + public String getName() { + return name; + } + + /** + * Returns the command arguments. 
+ * + * @return + */ + public List getArguments() { + return arguments; + } + + @Override + public String getSyntacticRepresentation() { + StringBuilder result = new StringBuilder("@"); + result.append(name); + for (Argument argument : arguments) { + result.append(" "); + if (argument.fromRule().isPresent()) { + Rule rule = argument.fromRule().get(); + result.append(Serializer.getString(rule.getHead())).append(Serializer.RULE_SEPARATOR) + .append(Serializer.getString(rule.getBody())); + } else if (argument.fromPositiveLiteral().isPresent()) { + result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); + } else if (argument.fromString().isPresent()) { + result.append(Serializer.getString(argument.fromString().get())); + } else { + throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); + } + } + result.append(Serializer.STATEMENT_SEPARATOR); + return result.toString(); + } + +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java similarity index 73% rename from rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java index 2ce1af622..bcec475d1 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.parser; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,17 +24,18 @@ import java.net.URI; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -public class DirectiveArgumentTest { +public class ArgumentTest 
{ private static final String STRING = "src/test/resources/facts.rls"; private static final URI IRI = URI.create("https://example.org"); private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); - private static final DirectiveArgument STRING_ARGUMENT = DirectiveArgument.string(STRING); - private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); - private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + private static final Argument STRING_ARGUMENT = Argument.string(STRING); + private static final Argument IRI_ARGUMENT = Argument.iri(IRI); + private static final Argument TERM_ARGUMENT = Argument.term(TERM); @Test public void equals_null_returnsFalse() { @@ -52,17 +53,17 @@ public void equals_self_returnsTrue() { @Test public void equals_equal_returnsTrue() { - assertTrue(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING))); - assertTrue(IRI_ARGUMENT.equals(DirectiveArgument.iri(IRI))); - assertTrue(TERM_ARGUMENT.equals(DirectiveArgument.term(TERM))); + assertTrue(STRING_ARGUMENT.equals(Argument.string(STRING))); + assertTrue(IRI_ARGUMENT.equals(Argument.iri(IRI))); + assertTrue(TERM_ARGUMENT.equals(Argument.term(TERM))); } @Test public void equals_notEqualButSameType_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING + "test"))); - assertFalse(IRI_ARGUMENT.equals(DirectiveArgument.iri(URI.create("https://example.com")))); + assertFalse(STRING_ARGUMENT.equals(Argument.string(STRING + "test"))); + assertFalse(IRI_ARGUMENT.equals(Argument.iri(URI.create("https://example.com")))); assertFalse(TERM_ARGUMENT - .equals(DirectiveArgument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); + .equals(Argument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); } @Test diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java deleted file mode 100644 index e17ae1d1e..000000000 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.semanticweb.rulewerk.parser; - -/*- - * #%L - * Rulewerk Parser - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.DataSource; - -/** - * Handler for parsing a custom Data Source declaration. 
- * - * @author Maximilian Marx - */ -@FunctionalInterface -public interface DataSourceDeclarationHandler extends DirectiveHandler { -} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 76c75e716..1be48eba7 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -27,6 +27,7 @@ import java.nio.file.InvalidPathException; import java.util.List; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -53,7 +54,7 @@ public interface DirectiveHandler { * directive, or the number of arguments is invalid. * @return a {@code T} instance corresponding to the given arguments. */ - public T handleDirective(List arguments, final SubParserFactory subParserFactory) + public T handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -65,7 +66,7 @@ public T handleDirective(List arguments, final SubParserFacto * @throws ParsingException when the given number of Arguments is invalid for * the Directive statement. */ - public static void validateNumberOfArguments(final List arguments, final int number) + public static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException( @@ -84,7 +85,7 @@ public static void validateNumberOfArguments(final List argum * * @return the contained {@link String}. 
*/ - public static String validateStringArgument(final DirectiveArgument argument, final String description) + public static String validateStringArgument(final Argument argument, final String description) throws ParsingException { return argument.fromString() .orElseThrow(() -> new ParsingException("description \"" + argument + "\" is not a string.")); @@ -101,7 +102,7 @@ public static String validateStringArgument(final DirectiveArgument argument, fi * * @return the File corresponding to the contained file path. */ - public static File validateFilenameArgument(final DirectiveArgument argument, final String description) + public static File validateFilenameArgument(final Argument argument, final String description) throws ParsingException { String fileName = DirectiveHandler.validateStringArgument(argument, description); File file = new File(fileName); @@ -126,7 +127,7 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi * * @return the contained IRI. */ - public static URI validateIriArgument(final DirectiveArgument argument, final String description) + public static URI validateIriArgument(final Argument argument, final String description) throws ParsingException { return argument.fromIri() .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not an IRI.")); @@ -143,7 +144,7 @@ public static URI validateIriArgument(final DirectiveArgument argument, final St * * @return the {@link URL} corresponding to the contained IRI. */ - public static URL validateUrlArgument(final DirectiveArgument argument, final String description) + public static URL validateUrlArgument(final Argument argument, final String description) throws ParsingException { URI iri = DirectiveHandler.validateIriArgument(argument, description); try { @@ -164,7 +165,7 @@ public static URL validateUrlArgument(final DirectiveArgument argument, final St * * @return the contained {@link Term}. 
*/ - public static Term validateTermArgument(final DirectiveArgument argument, final String description) + public static Term validateTermArgument(final Argument argument, final String description) throws ParsingException { return argument.fromTerm() .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not a Term.")); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 63d788f01..cec1ad19e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; @@ -253,7 +254,7 @@ public ParserConfiguration registerDirective(String name, DirectiveHandler arguments, + public KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParsingException { final DirectiveHandler handler = this.directives.get(name); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 5ffdcf281..124bb5381 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -25,6 +25,7 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; import 
org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -252,6 +253,15 @@ public static DataSourceDeclaration parseDataSourceDeclaration(final String inpu return parseDataSourceDeclaration(input, null); } + public static Command parseCommand(final String input, ParserConfiguration parserConfiguration) + throws ParsingException { + return parseSyntaxFragment(input, JavaCCParser::command, "command", parserConfiguration); + } + + public static Command parseCommand(final String input) throws ParsingException { + return parseCommand(input, null); + } + static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { try { parser.parse(); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java new file mode 100644 index 000000000..88801c331 --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java @@ -0,0 +1,96 @@ +package org.semanticweb.rulewerk.parser.datasources; + +import java.net.URL; +import java.util.List; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * Handler for interpreting the arguments of a custom Data Source declaration. + * + * @author Markus Kroetzsch + */ +@FunctionalInterface +public interface DataSourceDeclarationHandler { + + DataSource handleDataSourceDeclaration(List terms) throws ParsingException; + + /** + * Validate the provided number of arguments to the source declaration. + * + * @param terms arguments given to the source declaration. + * @param number expected number of arguments + * + * @throws ParsingException when the number of terms does not match expectations + */ + public static void validateNumberOfArguments(final List terms, final int number) throws ParsingException { + if (terms.size() != number) { + throw new ParsingException( + "Invalid number of arguments " + terms.size() + " for @source declaration, expected " + number); + } + } + + /** + * Returns the string content of the given term, or reports an error if the term + * is not an xsd:string. + * + * @param term the term to be processed + * @param parameterName the string name of the parameter to be used in error + * messages + * @return the extracted string + * @throws ParsingException thrown if the term was not a String + */ + public static String validateStringArgument(Term term, String parameterName) throws ParsingException { + try { + return Terms.extractString(term); + } catch (IllegalArgumentException e) { + throw makeParameterParsingException(term, parameterName, e); + } + } + + /** + * Returns the URL represented by the given term, or reports an error if no + * valid URL could be extracted from the term. 
+ * + * @param term the term to be processed + * @param parameterName the string name of the parameter to be used in error + * messages + * @return the extracted URL + * @throws ParsingException thrown if the term was not a URL + */ + public static URL validateUrlArgument(Term term, String parameterName) throws ParsingException { + try { + return Terms.extractUrl(term); + } catch (IllegalArgumentException e) { + throw makeParameterParsingException(term, parameterName, e); + } + } + + static ParsingException makeParameterParsingException(Term term, String parameterName, Throwable cause) { + return new ParsingException("Expected " + parameterName + " to be a string. Found " + term.toString() + ".", + cause); + } +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index ae227a9d1..2580a2b56 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -26,8 +26,8 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -42,7 +42,7 @@ public class ImportFileDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { 
DirectiveHandler.validateNumberOfArguments(arguments, 1); File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index 7de06a3ea..f70831417 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -26,9 +26,9 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.RulewerkException; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.DirectiveHandler; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -42,7 +42,7 @@ */ public class ImportFileRelativeDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(List arguments, SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); PrefixDeclarationRegistry prefixDeclarationRegistry = getPrefixDeclarationRegistry(subParserFactory); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index e932e160e..59e2c3f85 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -44,7 +44,6 @@ import java.util.ArrayDeque; import java.util.Deque; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; @@ -60,6 +59,9 @@ import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.script.Argument; +import org.semanticweb.rulewerk.core.script.Command; + import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class JavaCCParser extends JavaCCParserBase @@ -142,7 +144,7 @@ void source() throws PrefixDeclarationException : { DataSource dataSource() throws PrefixDeclarationException : { PositiveLiteral positiveLiteral; - List< DirectiveArgument > arguments; + List< Argument > arguments; } { positiveLiteral = positiveLiteral(FormulaContext.HEAD) { return parseDataSourceSpecificPartOfDataSourceDeclaration(positiveLiteral); @@ -151,22 +153,25 @@ DataSource dataSource() throws PrefixDeclarationException : { KnowledgeBase directive() throws PrefixDeclarationException : { Token name; - List< DirectiveArgument > arguments; + List< Argument > arguments; } { name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { return parseDirectiveStatement(name.image, arguments, getSubParserFactory()); } } -/*TODO List< DirectiveArgument > command() throws PrefixDeclarationException : { +Command command() throws PrefixDeclarationException : { Token name; - List< DirectiveArgument > arguments; + List< Argument > arguments; } { - name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { - arguments.add(0, DirectiveArgument.string(name.image)); - return arguments; - } -}*/ + name = < 
CUSTOM_DIRECTIVE > + ( arguments = Arguments() < DOT > { + return new Command(name.image,arguments); + } | + < DOT > { + return new Command(name.image, new LinkedList< Argument >()); + } ) +} void statement() throws PrefixDeclarationException : { Statement statement; @@ -180,10 +185,18 @@ void statement() throws PrefixDeclarationException : { } Rule rule() throws PrefixDeclarationException : { + Rule rule; +} { + rule = ruleNoDot() < DOT > { + return rule; + } +} + +Rule ruleNoDot() throws PrefixDeclarationException : { List < PositiveLiteral > head; List < Literal > body; } { - head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > { + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) { // check that the intersection between headExiVars and BodyVars is empty for (String variable : headExiVars) { if (bodyVars.contains(variable)) @@ -432,17 +445,17 @@ String String() : { ) { return unescapeStr(t.image, t.beginLine, t.beginColumn); } } -LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : { - DirectiveArgument argument; +LinkedList< Argument > Arguments() throws PrefixDeclarationException : { + Argument argument; String str; Rule rule; PositiveLiteral positiveLiteral; Term t; - LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); + LinkedList< Argument > rest = new LinkedList< Argument >(); } { - ( LOOKAHEAD(rule()) rule = rule() { argument = DirectiveArgument.rule(rule); } - | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = positiveLiteral(FormulaContext.HEAD) { argument = DirectiveArgument.positiveLiteral(positiveLiteral); } - | LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } + ( LOOKAHEAD(ruleNoDot()) rule = ruleNoDot() { argument = Argument.rule(rule); } + | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = 
positiveLiteral(FormulaContext.HEAD) { argument = Argument.positiveLiteral(positiveLiteral); } + | LOOKAHEAD(String()) str = String() { argument = Argument.string(str); } | LOOKAHEAD(absoluteIri()) str = absoluteIri() { URI url; try { @@ -450,10 +463,10 @@ LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : } catch (URISyntaxException e) { throw makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); } - argument = DirectiveArgument.iri(url); + argument = Argument.iri(url); } - | t = term(FormulaContext.HEAD) { argument = DirectiveArgument.term(t); } - ) [< COMMA > rest = Arguments()] { + | t = term(FormulaContext.HEAD) { argument = Argument.term(t); } + ) [rest = Arguments()] { rest.addFirst(argument); return rest; } @@ -539,11 +552,11 @@ MORE : { | < COLON : ":" > } -TOKEN : { - < ARROW : ":-" > : BODY +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { + < ARROW : ":-" > } -< DEFAULT, BODY > TOKEN : { +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { < TILDE : "~" > } @@ -575,7 +588,7 @@ TOKEN : { | < ARGUMENT_NAME : < DIRECTIVENAME > > } -< TERM > TOKEN : { +< TERM, DIRECTIVE_ARGUMENTS > TOKEN : { < UNIVAR : "?" < VARORPREDNAME > > | < EXIVAR : "!" < VARORPREDNAME > > | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? 
> { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 8fd4e8c84..9ade274bf 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -25,6 +25,7 @@ import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.NamedNull; @@ -38,7 +39,6 @@ import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.DirectiveArgument; import org.semanticweb.rulewerk.parser.LocalPrefixDeclarationRegistry; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -345,7 +345,7 @@ Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syn return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); } - KnowledgeBase parseDirectiveStatement(String name, List arguments, + KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParseException { try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java new file mode 100644 index 
000000000..93c527d1d --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -0,0 +1,23 @@ +package org.semanticweb.rulewerk.parser; + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; + +public class CommandParserTest { + + @Test + public void parseCommand() throws ParsingException { + String input = "@query p(?X, a):- q(?X) \"string\" abcd p(a) ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals("query", command.getName()); + assertEquals(5, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromRule().isPresent()); + assertTrue(command.getArguments().get(1).fromString().isPresent()); + assertTrue(command.getArguments().get(2).fromTerm().isPresent()); + assertTrue(command.getArguments().get(3).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(4).fromIri().isPresent()); + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 02f58e5ad..725d54dc1 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -26,6 +26,7 @@ import java.net.URI; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -34,9 +35,9 @@ public class DirectiveHandlerTest { private static final URI IRI = URI.create("https://example.org"); private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); - private static final DirectiveArgument 
STRING_ARGUMENT = DirectiveArgument.string(STRING); - private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); - private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + private static final Argument STRING_ARGUMENT = Argument.string(STRING); + private static final Argument IRI_ARGUMENT = Argument.iri(IRI); + private static final Argument TERM_ARGUMENT = Argument.term(TERM); @Test public void validateStringArgument_stringArgument_succeeds() throws ParsingException { @@ -90,7 +91,7 @@ public void validateFilenameArgument_filename_succeeds() throws ParsingException @Test public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { - DirectiveHandler.validateFilenameArgument(DirectiveArgument.string(STRING + "-nonexistant"), + DirectiveHandler.validateFilenameArgument(Argument.string(STRING + "-nonexistant"), "filename argument"); } @@ -101,7 +102,7 @@ public void validateUrlArgument_url_succeeds() throws ParsingException, Malforme @Test(expected = ParsingException.class) public void validateUrlArgument_invalidUrl_throws() throws ParsingException { - DirectiveHandler.validateUrlArgument(DirectiveArgument.iri(URI.create("example://test")), "url argument"); + DirectiveHandler.validateUrlArgument(Argument.iri(URI.create("example://test")), "url argument"); } } From c472c46e8a812778eab95d81a57e6d0937cda51c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:47:37 +0200 Subject: [PATCH 0895/1255] Utility class to measure times --- .../rulewerk/core/reasoner/Timer.java | 545 ++++++++++++++++++ 1 file changed, 545 insertions(+) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java new file mode 100644 index 000000000..9b555ede2 --- /dev/null +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java @@ -0,0 +1,545 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadMXBean; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Class for keeping CPU and system times. The class has a number of features + * that can be used to measure and aggregate times across many threads and many + * methods. + * + * @author Markus Kroetzsch + */ +public class Timer { + + private static Logger LOGGER = LoggerFactory.getLogger(Timer.class); + + /** Flag for indicating that no times should be taken (just count runs). */ + public static final int RECORD_NONE = 0x00000000; + /** Flag for indicating that CPU time should be taken. */ + public static final int RECORD_CPUTIME = 0x00000001; + /** Flag for indicating that wall clock time should be taken. */ + public static final int RECORD_WALLTIME = 0x00000002; + /** Flag for indicating that all supported times should be taken. 
*/ + public static final int RECORD_ALL = RECORD_CPUTIME | RECORD_WALLTIME; + + static final ThreadMXBean tmxb = ManagementFactory.getThreadMXBean(); + + static final ConcurrentHashMap registeredTimers = new ConcurrentHashMap(); + + protected final String name; + protected final long threadId; + protected final int todoFlags; + + protected long currentStartCpuTime = -1; + protected long currentStartWallTime = -1; + protected boolean isRunning = false; + protected long totalCpuTime = 0; + protected long totalWallTime = 0; + protected int measurements = 0; + protected int threadCount = 0; + + /** + * Constructor. Every timer is identified by three things: a string name, an + * integer for flagging its tasks (todos), and a thread id (long). + * + * Tasks can be flagged by a disjunction of constants like RECORD_CPUTIME and + * RECORD_WALLTIME. Only times for which an according flag is set will be + * recorded. + * + * The thread id can be the actual id of the thread that is measured, or 0 + * (invalid id) to not assign the timer to any thread. In this case, no CPU time + * measurement is possible since Java does not allow us to measure the total CPU + * time across all threads. + * + * @param name + * @param todoFlags + * @param threadId + */ + public Timer(String name, int todoFlags, long threadId) { + this.name = name; + this.todoFlags = todoFlags; + this.threadId = threadId; + + if (!tmxb.isThreadCpuTimeEnabled()) { + tmxb.setThreadCpuTimeEnabled(true); + } + } + + public Timer(String name) { + this(name, RECORD_ALL, Thread.currentThread().getId()); + } + + /** + * + * @param name + * @param todoFlags + * @return a new {@link Timer} for the current thread + */ + static public Timer getTimerForCurrentThread(String name, int todoFlags) { + return new Timer(name, todoFlags, Thread.currentThread().getId()); + } + + /** + * Get the total recorded CPU time in nanoseconds. 
+ * + * @return recorded CPU time in nanoseconds + */ + public long getTotalCpuTime() { + return totalCpuTime; + } + + public long getAvgCpuTime() { + return totalCpuTime > 0 && measurements > 0 ? totalCpuTime / measurements : -1; + } + + /** + * Get the string name of the timer. + * + * @return string name + */ + public String getName() { + return name; + } + + /** + * Get the ID of the thread for which this timer was created. + * + * @return thread ID + */ + public long getThreadId() { + return threadId; + } + + /** + * Get the total recorded wall clock time in nanoseconds. + * + * @return recorded wall time in nanoseconds + */ + public long getTotalWallTime() { + return totalWallTime; + } + + public long getAvgWallTime() { + return totalWallTime > 0 && measurements > 0 ? totalWallTime / measurements : -1; + } + + /** + * Return true if the timer is running. + * + * @return true if running + */ + public boolean isRunning() { + return isRunning; + } + + /** + * Start the timer. + */ + public synchronized void start() { + if ((todoFlags & RECORD_CPUTIME) != 0) { + currentStartCpuTime = getThreadCpuTime(threadId); + } else { + currentStartCpuTime = -1; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + currentStartWallTime = System.nanoTime(); + } else { + currentStartWallTime = -1; + } + isRunning = true; + } + + /** + * Stop the timer (if running) and reset all recorded values. + */ + public synchronized void reset() { + currentStartCpuTime = -1; + currentStartWallTime = -1; + totalCpuTime = 0; + totalWallTime = 0; + measurements = 0; + isRunning = false; + threadCount = 0; + } + + /** + * Stop the timer and return the CPU time that has passed since it had last been + * started. The total time (both system and CPU) of all start-stop cycles is + * recorded with the timer. 
+ * + * @return CPU time that the timer was running, or -1 if timer not running or + * CPU time unavailable for other reasons + */ + public synchronized long stop() { + long totalTime = -1; + + if ((todoFlags & RECORD_CPUTIME) != 0 && (currentStartCpuTime != -1)) { + long cpuTime = getThreadCpuTime(threadId); + if (cpuTime != -1) { // may fail if thread already dead + totalTime = cpuTime - currentStartCpuTime; + totalCpuTime += totalTime; + } + } + + if ((todoFlags & RECORD_WALLTIME) != 0 && (currentStartWallTime != -1)) { + long wallTime = System.nanoTime(); + totalWallTime += wallTime - currentStartWallTime; + } + + if (isRunning) { + measurements += 1; + isRunning = false; + } + + currentStartWallTime = -1; + currentStartCpuTime = -1; + + return totalTime; + } + + /** + * Print logging information for the timer. The log only shows the recorded time + * of the completed start-stop cycles. If the timer is still running, then it + * will not be stopped to add the currently measured time to the output but a + * warning will be logged. 
+ * + */ + public void log() { + if (LOGGER.isInfoEnabled()) { + String timerLabel; + if (threadId != 0) { + timerLabel = name + " (thread " + threadId + ")"; + } else if (threadCount > 1) { + timerLabel = name + " (over " + threadCount + " threads)"; + } else { + timerLabel = name; + } + + if (todoFlags == RECORD_NONE) { + LOGGER.info("Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); + } else { + String labels = ""; + String values = ""; + String separator; + + if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { + labels += "CPU"; + values += totalCpuTime / 1000000; + separator = "/"; + } else { + separator = ""; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + labels += separator + "Wall"; + values += separator + totalWallTime / 1000000; + } + if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { + labels += "/CPU avg"; + values += "/" + (float) (totalCpuTime) / measurements / 1000000; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + labels += "/Wall avg"; + values += "/" + (float) (totalWallTime) / measurements / 1000000; + } + if (threadCount > 1) { + if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { + labels += "/CPU per thread"; + values += "/" + (float) (totalCpuTime) / threadCount / 1000000; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + labels += "/Wall per thread"; + values += "/" + (float) (totalWallTime) / threadCount / 1000000; + } + } + + LOGGER.info( + "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); + } + + if (isRunning) { + LOGGER.warn("Timer " + timerLabel + " logged while it was still running"); + } + } + } + + /** + * Start a timer of the given string name for all todos and the current thread. + * If no such timer exists yet, then it will be newly created. 
+ * + * @param timerName the name of the timer + */ + public static void startNamedTimer(String timerName) { + getNamedTimer(timerName).start(); + } + + /** + * Start a timer of the given string name for the current thread. If no such + * timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + */ + public static void startNamedTimer(String timerName, int todoFlags) { + getNamedTimer(timerName, todoFlags).start(); + } + + /** + * Start a timer of the given string name for the current thread. If no such + * timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + */ + public static void startNamedTimer(String timerName, int todoFlags, long threadId) { + getNamedTimer(timerName, todoFlags, threadId).start(); + } + + /** + * Stop a timer of the given string name for all todos and the current thread. + * If no such timer exists, -1 will be returned. Otherwise the return value is + * the CPU time that was measured. + * + * @param timerName the name of the timer + * @return CPU time if timer existed and was running, and -1 otherwise + */ + public static long stopNamedTimer(String timerName) { + return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); + } + + /** + * Stop a timer of the given string name for the current thread. If no such + * timer exists, -1 will be returned. Otherwise the return value is the CPU time + * that was measured. + * + * @param timerName the name of the timer + * @param todoFlags + * @return CPU time if timer existed and was running, and -1 otherwise + */ + public static long stopNamedTimer(String timerName, int todoFlags) { + return stopNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); + } + + /** + * Stop a timer of the given string name for the given thread. 
If no such timer + * exists, -1 will be returned. Otherwise the return value is the CPU time that + * was measured. + * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + * @return CPU time if timer existed and was running, and -1 otherwise + */ + public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { + Timer key = new Timer(timerName, todoFlags, threadId); + if (registeredTimers.containsKey(key)) { + return registeredTimers.get(key).stop(); + } else { + return -1; + } + } + + /** + * Reset a timer of the given string name for all todos and the current thread. + * If no such timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + */ + public static void resetNamedTimer(String timerName) { + getNamedTimer(timerName).reset(); + } + + /** + * Reset a timer of the given string name for the current thread. If no such + * timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + */ + public static void resetNamedTimer(String timerName, int todoFlags) { + getNamedTimer(timerName, todoFlags).reset(); + } + + /** + * Reset a timer of the given string name for the given thread. If no such timer + * exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + */ + public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { + getNamedTimer(timerName, todoFlags, threadId).reset(); + } + + /** + * Get a timer of the given string name that takes all possible times (todos) + * for the current thread. If no such timer exists yet, then it will be newly + * created. 
+ * + * @param timerName the name of the timer + * @return timer + */ + public static Timer getNamedTimer(String timerName) { + return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); + } + + /** + * Returns all registered timers + * + * @return an iterable collection of named timers + */ + public static Iterable getNamedTimers() { + return registeredTimers.keySet(); + } + + /** + * Get a timer of the given string name and todos for the current thread. If no + * such timer exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + * @return timer + */ + public static Timer getNamedTimer(String timerName, int todoFlags) { + return getNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); + } + + /** + * Get a timer of the given string name for the given thread. If no such timer + * exists yet, then it will be newly created. + * + * @param timerName the name of the timer + * @param todoFlags + * @param threadId of the thread to track, or 0 if only system clock should be + * tracked + * @return timer + */ + public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { + Timer key = new Timer(timerName, todoFlags, threadId); + Timer previous = registeredTimers.putIfAbsent(key, key); + if (previous != null) { + return previous; + } + // else + return key; + } + + /** + * Collect the total times measured by all known named timers of the given name. 
+ * + * @param timerName + * @return timer + */ + public static Timer getNamedTotalTimer(String timerName) { + long totalCpuTime = 0; + long totalSystemTime = 0; + int measurements = 0; + int threadCount = 0; + int todoFlags = RECORD_NONE; + Timer previousTimer = null; + for (Map.Entry entry : registeredTimers.entrySet()) { + if (entry.getValue().name.equals(timerName)) { + previousTimer = entry.getValue(); + threadCount += 1; + totalCpuTime += previousTimer.totalCpuTime; + totalSystemTime += previousTimer.totalWallTime; + measurements += previousTimer.measurements; + todoFlags |= previousTimer.todoFlags; + } + } + + if (threadCount == 1) { + return previousTimer; + } else { + Timer result = new Timer(timerName, todoFlags, 0); + result.totalCpuTime = totalCpuTime; + result.totalWallTime = totalSystemTime; + result.measurements = measurements; + result.threadCount = threadCount; + return result; + } + } + + public static void logAllNamedTimers(String timerName) { + for (Map.Entry entry : registeredTimers.entrySet()) { + if (entry.getValue().name.equals(timerName)) { + entry.getValue().log(); + } + } + } + + @Override + public int hashCode() { + // Jenkins hash, see http://www.burtleburtle.net/bob/hash/doobs.html and also + // http://en.wikipedia.org/wiki/Jenkins_hash_function. 
+ int hash = name.hashCode(); + hash += (hash << 10); + hash ^= (hash >> 6); + hash += Long.valueOf(threadId).hashCode(); + hash += (hash << 10); + hash ^= (hash >> 6); + hash += Integer.valueOf(todoFlags).hashCode(); + hash += (hash << 10); + hash ^= (hash >> 6); + + hash += (hash << 3); + hash ^= (hash >> 11); + hash += (hash << 15); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } else if (obj == null) { + return false; + } else if (getClass() != obj.getClass()) { + return false; + } else if (threadId == ((Timer) obj).threadId && todoFlags == ((Timer) obj).todoFlags + && name.equals(((Timer) obj).name)) { + return true; + } else { + return false; + } + } + + protected static long getThreadCpuTime(long threadId) { + if (threadId == 0) { // generally invalid + return 0; + } else { + return tmxb.getThreadCpuTime(threadId); + } + } + +} From 5a7cd8972ef5d57ed749c6a3d6bc07334f69b256 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:48:19 +0200 Subject: [PATCH 0896/1255] License header --- .../rulewerk/core/model/api/Command.java | 20 +++++++++++++++++++ .../rulewerk/parser/CommandParserTest.java | 20 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 855652158..1634ae277 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.model.api; +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.util.List; import org.semanticweb.rulewerk.core.model.implementation.Serializer; diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java index 93c527d1d..f81d83088 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.parser; +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.*; import org.junit.Test; From d07e95c1838ed5b171af7f80e77e422c3fc66095 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 12 Aug 2020 23:49:36 +0200 Subject: [PATCH 0897/1255] New module for interpreting commands --- pom.xml | 13 +- rulewerk-commands/LICENSE.txt | 201 ++++++++++++++++++ rulewerk-commands/pom.xml | 31 +++ .../commands/AssertCommandInterpreter.java | 66 ++++++ .../commands/CommandExecutionException.java | 44 ++++ .../rulewerk/commands/CommandInterpreter.java | 58 +++++ .../commands/HelpCommandInterpreter.java | 59 +++++ .../rulewerk/commands/Interpreter.java | 84 ++++++++ .../commands/LoadCommandInterpreter.java | 61 ++++++ .../commands/QueryCommandInterpreter.java | 87 ++++++++ .../commands/ReasonCommandInterpreter.java | 59 +++++ rulewerk-examples/pom.xml | 25 ++- 12 files changed, 771 insertions(+), 17 deletions(-) create mode 100644 rulewerk-commands/LICENSE.txt create mode 100644 rulewerk-commands/pom.xml create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java diff --git a/pom.xml b/pom.xml index 47efeb4a6..17fe39ba5 100644 --- a/pom.xml +++ b/pom.xml @@ -1,7 +1,5 @@ - + 
4.0.0 @@ -23,10 +21,11 @@ rulewerk-owlapi rulewerk-graal rulewerk-parser + rulewerk-commands rulewerk-examples rulewerk-client coverage - + @@ -130,7 +129,7 @@ org.codehaus.mojo license-maven-plugin 1.14 - + first @@ -173,7 +172,7 @@ - + @@ -188,7 +187,7 @@ - + diff --git a/rulewerk-commands/LICENSE.txt b/rulewerk-commands/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-commands/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml new file mode 100644 index 000000000..9e4eb1374 --- /dev/null +++ b/rulewerk-commands/pom.xml @@ -0,0 +1,31 @@ + + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.7.0-SNAPSHOT + + + rulewerk-commands + jar + + Rulewerk command execution support + API for interpreting shell commands to control Rulewerk + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java new file mode 100644 index 000000000..0a4d0075e --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -0,0 +1,66 @@ +package org.semanticweb.rulewerk.commands; + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class AssertCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) + throws CommandExecutionException { + + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + interpreter.getReasoner().getKnowledgeBase().addStatement(fact); + } else if (argument.fromRule().isPresent()) { + interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); + } else { + throw new CommandExecutionException("Only facts and rules can be asserted."); + } + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be added to the knowledge base\n" + + "Reasoning needs to be invoked after finishing addition of statements."; + } + + @Override + public String getSynopsis() { + return "add facts and rules to the knowledge base"; + } + +} diff --git 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java new file mode 100644 index 000000000..9b9a5c6b0 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java @@ -0,0 +1,44 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; + +public class CommandExecutionException extends RulewerkException { + + /** + * Generated serial version UID + */ + private static final long serialVersionUID = 1479091500621334935L; + + public CommandExecutionException(Throwable cause) { + super(cause); + } + + public CommandExecutionException(String message, Throwable cause) { + super(message, cause); + } + + public CommandExecutionException(String message) { + super(message); + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java new file mode 100644 index 000000000..7959a3376 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java @@ -0,0 +1,58 @@ +package org.semanticweb.rulewerk.commands; + +import org.semanticweb.rulewerk.core.model.api.Command; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for classes that interpret (execute) specific commands. + * + * @author Markus Kroetzsch + * + */ +public interface CommandInterpreter { + + /** + * Execute the commands in the context of the given reasoner and output stream. 
+ * + * @param command command to be interpreted + * @param interpreter surrounding interpreter that provides the execution + * context + */ + void run(Command command, Interpreter interpreter) throws CommandExecutionException; + + /** + * Return a text that describes command use and parameters, using the given + * command name. The output should start with a "Usage:" line, followed by + * single-space-indented parameter descriptions. + * + * @return help message + */ + String getHelp(String commandName); + + /** + * Returns a short line describing the purpose of the command. + * + * @return short command synopsis + */ + String getSynopsis(); + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java new file mode 100644 index 000000000..a94b0fa30 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -0,0 +1,59 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.TermType; + +public class HelpCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 0) { + interpreter.getOut().println("Available commands:"); + for (String commandName : interpreter.commandInterpreters.keySet()) { + interpreter.getOut().println( + " @" + commandName + ": " + interpreter.commandInterpreters.get(commandName).getSynopsis()); + } + } else if (command.getArguments().size() == 1 && command.getArguments().get(0).fromTerm().isPresent() + && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); + if (interpreter.commandInterpreters.containsKey(helpCommand)) { + interpreter.getOut().println(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); + } else { + interpreter.getOut().println("Command '" + helpCommand + "' not known."); + } + } else { + interpreter.getOut().println(getHelp(command.getName())); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"; + } + + @Override + public String getSynopsis() { + return "print help on available commands"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java new file mode 100644 index 000000000..959a0d6d6 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -0,0 +1,84 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk 
Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintStream; +import java.util.HashMap; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.slf4j.Logger; + +public class Interpreter { + + final Reasoner reasoner; + final PrintStream out; + final Logger logger; + + final HashMap commandInterpreters = new HashMap<>(); + + public Interpreter(Reasoner reasoner, PrintStream out, Logger logger) { + this.reasoner = reasoner; + this.out = out; + this.logger = logger; + registerDefaultCommandInterpreters(); + } + + public void registerCommandInterpreter(String command, CommandInterpreter commandInterpreter) { + commandInterpreters.put(command, commandInterpreter); + } + + public void runCommands(List commands) throws CommandExecutionException { + for (Command command : commands) { + runCommand(command); + } + } + + public void runCommand(Command command) throws CommandExecutionException { + if (commandInterpreters.containsKey(command.getName())) { + try { + commandInterpreters.get(command.getName()).run(command, this); + } catch (Exception e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } else { + throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); + } + } + + public Reasoner getReasoner() { + return reasoner; + } + + public PrintStream getOut() { + return out; + } + + 
private void registerDefaultCommandInterpreters() { + registerCommandInterpreter("help", new HelpCommandInterpreter()); + registerCommandInterpreter("assert", new AssertCommandInterpreter()); + registerCommandInterpreter("query", new QueryCommandInterpreter()); + registerCommandInterpreter("reason", new ReasonCommandInterpreter()); + registerCommandInterpreter("load", new LoadCommandInterpreter()); + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java new file mode 100644 index 000000000..b52d078de --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -0,0 +1,61 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.FileInputStream; +import java.io.FileNotFoundException; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public class LoadCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 1 && command.getArguments().get(0).fromString().isPresent()) { + String fileName = command.getArguments().get(0).fromString().get(); + try { + FileInputStream fileInputStream = new FileInputStream(fileName); + RuleParser.parseInto(interpreter.getReasoner().getKnowledgeBase(), fileInputStream); + } catch (FileNotFoundException e) { + throw new CommandExecutionException(e.getMessage(), e); + } catch (ParsingException e) { + interpreter.getOut().println("Error parsing file: " + e.getMessage()); + } + + } else { + throw new CommandExecutionException(getHelp(command.getName())); + } + + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file"; + } + + @Override + public String getSynopsis() { + return "load a knowledge base from file (in Rulewerk rls format)"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java new file mode 100644 index 000000000..bc8f69056 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -0,0 +1,87 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class QueryCommandInterpreter implements CommandInterpreter { + + public static Term KEYWORD_LIMIT = Expressions.makeAbstractConstant("LIMIT"); + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + + List arguments = command.getArguments(); + PositiveLiteral literal; + + if (arguments.size() > 0 && arguments.get(0).fromPositiveLiteral().isPresent()) { + literal = arguments.get(0).fromPositiveLiteral().get(); + } else { + throw new CommandExecutionException("First argument must be a query literal."); + } + + int limit = -1; + if (arguments.size() == 3 && KEYWORD_LIMIT.equals(arguments.get(1).fromTerm().orElse(null)) + && arguments.get(2).fromTerm().isPresent()) { + try { + limit = Terms.extractInt(arguments.get(2).fromTerm().get()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Invalid limit given: " + arguments.get(3).fromTerm().get()); + } + } else if (arguments.size() != 1) { + throw new 
CommandExecutionException("Unrecognized arguments"); + } + + Timer timer = new Timer("query"); + timer.start(); + try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { + int count = 0; + while (count != limit && answers.hasNext()) { + interpreter.getOut().println(" " + answers.next()); + count++; + } + timer.stop(); + interpreter.getOut().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + + "ms. Results are " + answers.getCorrectness() + "."); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " [LIMIT ] .\n" + + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" + + " limit: maximal number of results to be shown"; + } + + @Override + public String getSynopsis() { + return "print results to queries"; + } +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java new file mode 100644 index 000000000..13753e8f4 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -0,0 +1,59 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class ReasonCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + + if (command.getArguments().size() > 0) { + throw new CommandExecutionException("This command supports no arguments."); + } + + Timer timer = new Timer("reasoning"); + timer.start(); + try { + interpreter.getReasoner().reason(); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + timer.stop(); + interpreter.getOut() + .println("Loading and materialization finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " ."; + } + + @Override + public String getSynopsis() { + return "load data and compute conclusions from knowledge base"; + } + +} diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index d4abb343b..975a3b56d 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -37,16 +37,21 @@ rulewerk-graal ${project.version}
- - ${project.groupId} - rulewerk-parser - ${project.version} - - - ${project.groupId} - rulewerk-vlog - ${project.version} - + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-commands + ${project.version} + org.slf4j slf4j-log4j12 From 51ae716b8baa7676b3e1881825a6e2d27239a87a Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 13 Aug 2020 17:52:23 +0200 Subject: [PATCH 0898/1255] fix bug #182 query answering correctness when vLog does not know predicate. --- .../rulewerk/reasoner/vlog/VLogReasoner.java | 179 ++++++++++-------- .../reasoner/vlog/AddDataSourceTest.java | 22 +-- .../vlog/QueryAnsweringCorrectnessTest.java | 95 +++++++++- 3 files changed, 207 insertions(+), 89 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 8f17d699f..3340b0bd3 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -100,7 +100,7 @@ public VLogReasoner(KnowledgeBase knowledgeBase) { this.knowledgeBase = knowledgeBase; this.knowledgeBase.addListener(this); - setLogLevel(this.internalLogLevel); + this.setLogLevel(this.internalLogLevel); } @Override @@ -111,7 +111,7 @@ public KnowledgeBase getKnowledgeBase() { @Override public void setAlgorithm(final Algorithm algorithm) { Validate.notNull(algorithm, "Algorithm cannot be null!"); - validateNotClosed(); + this.validateNotClosed(); this.algorithm = algorithm; } @@ -122,7 +122,7 @@ public Algorithm getAlgorithm() { @Override public void setReasoningTimeout(Integer seconds) { - validateNotClosed(); + this.validateNotClosed(); if (seconds != null) { Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); } @@ 
-136,7 +136,7 @@ public Integer getReasoningTimeout() { @Override public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - validateNotClosed(); + this.validateNotClosed(); Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); this.ruleRewriteStrategy = ruleRewritingStrategy; } @@ -145,29 +145,33 @@ public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } + + @Override + public Correctness getCorrectness() { + return this.correctness; + } /* * TODO Due to automatic predicate renaming, it can happen that an EDB predicate * cannot be queried after loading unless reasoning has already been invoked * (since the auxiliary rule that imports the EDB facts to the "real" predicate * must be used). This issue could be weakened by rewriting queries to - * (single-source) EDB predicates internally when in such a state, + * (single-source) EDB predicates internally when in such a state. */ - // @Override void load() throws IOException { - validateNotClosed(); + this.validateNotClosed(); switch (this.reasonerState) { case KB_NOT_LOADED: - loadKnowledgeBase(); + this.loadKnowledgeBase(); break; case KB_LOADED: case MATERIALISED: // do nothing, all KB is already loaded break; case KB_CHANGED: - resetReasoner(); - loadKnowledgeBase(); + this.resetReasoner(); + this.loadKnowledgeBase(); default: break; } @@ -183,16 +187,16 @@ void loadKnowledgeBase() throws IOException { } // 1. vLog is initialized by loading VLog data sources - loadVLogDataSources(vLogKB); + this.loadVLogDataSources(vLogKB); // 2. in-memory data is loaded - loadInMemoryDataSources(vLogKB); - validateDataSourcePredicateArities(vLogKB); + this.loadInMemoryDataSources(vLogKB); + this.validateDataSourcePredicateArities(vLogKB); - loadFacts(vLogKB); + this.loadFacts(vLogKB); // 3. 
rules are loaded - loadRules(vLogKB); + this.loadRules(vLogKB); this.reasonerState = ReasonerState.KB_LOADED; @@ -213,9 +217,9 @@ void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { } void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { - vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); + vLogKB.getEdbPredicates().forEach((k, v) -> this.loadInMemoryDataSource(v.getDataSource(), k)); - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> this.loadInMemoryDataSource(k.getDataSource(), v)); } void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { @@ -223,7 +227,7 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica final VLogInMemoryDataSource inMemoryDataSource = (VLogInMemoryDataSource) dataSource; try { - load(predicate, inMemoryDataSource); + this.load(predicate, inMemoryDataSource); } catch (final EDBConfigurationException e) { throw new RulewerkRuntimeException("Invalid data sources configuration!", e); } @@ -252,9 +256,10 @@ void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSo */ void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { - vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); + vLogKB.getEdbPredicates().forEach((k, v) -> this.validateDataSourcePredicateArity(k, v.getDataSource())); - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); + vLogKB.getAliasesForEdbPredicates() + .forEach((k, v) -> this.validateDataSourcePredicateArity(v, k.getDataSource())); } /** @@ -325,23 +330,23 @@ void loadRules(final VLogKnowledgeBase vLogKB) { @Override public boolean reason() throws IOException { - validateNotClosed(); + 
this.validateNotClosed(); switch (this.reasonerState) { case KB_NOT_LOADED: - load(); - runChase(); + this.load(); + this.runChase(); break; case KB_LOADED: - runChase(); + this.runChase(); break; case KB_CHANGED: - resetReasoner(); - load(); - runChase(); + this.resetReasoner(); + this.load(); + this.runChase(); break; case MATERIALISED: - runChase(); + this.runChase(); break; default: break; @@ -383,29 +388,49 @@ private void runChase() { @Override public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); + this.validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - karmaresearch.vlog.QueryResultIterator queryResultIterator; + final karmaresearch.vlog.QueryResultIterator queryResultIterator; try { final int predicateId = this.vLog.getPredicateId(vLogAtom.getPredicate()); - final long[] terms = extractTerms(vLogAtom.getTerms()); + final long[] terms = this.extractTerms(vLogAtom.getTerms()); queryResultIterator = this.vLog.query(predicateId, terms, true, filterBlanks); } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. 
Answer must be empty!"); - return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); + return this.createEmptyResultIterator(query); } - logWarningOnCorrectness(); + this.logWarningOnCorrectness(this.correctness); return new VLogFastQueryResultIterator(queryResultIterator, this.correctness, this.vLog); } + private QueryResultIterator createEmptyResultIterator(final PositiveLiteral query) { + final Correctness answerCorrectness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(answerCorrectness); + return new EmptyQueryResultIterator(answerCorrectness); + } + + private Correctness getCorrectnessUnknownPredicate(final PositiveLiteral query) { + final Correctness answerCorrectness; + if (this.reasonerState == ReasonerState.MATERIALISED) { + this.warnUnknownPredicate(query); + answerCorrectness = Correctness.SOUND_AND_COMPLETE; + } else { + answerCorrectness = Correctness.SOUND_BUT_INCOMPLETE; + } + return answerCorrectness; + } + + private void warnUnknownPredicate(final PositiveLiteral query) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the materialised knowledge base. Answer must be empty!"); + } + /** * Utility method copied from {@link karmaresearch.vlog.VLog}. 
* @@ -441,7 +466,7 @@ private long[] extractTerms(karmaresearch.vlog.Term[] terms) throws NotStartedEx @Override public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); + this.validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); @@ -452,18 +477,22 @@ public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean include } catch (NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (NonExistingPredicateException e) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - result = 0; + return this.createEmptyResultCount(query); } - logWarningOnCorrectness(); + this.logWarningOnCorrectness(this.correctness); return new QueryAnswerCountImpl(this.correctness, result); } + private QueryAnswerCount createEmptyResultCount(final PositiveLiteral query) { + final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(correctness); + return new QueryAnswerCountImpl(correctness, 0); + } + @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { - validateBeforeQuerying(query); + this.validateBeforeQuerying(query); Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); @@ -475,33 +504,33 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that 
does not occur in the knowledge base. Answers are therefore empty."); + final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(correctness); + return correctness; } - - logWarningOnCorrectness(); + this.logWarningOnCorrectness(this.correctness); return this.correctness; } private void validateBeforeQuerying(final PositiveLiteral query) { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before reasoner is loaded!"); } Validate.notNull(query, "Query atom must not be null!"); } @Override public Correctness forEachInference(InferenceAction action) throws IOException { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Obtaining inferences is not alowed before reasoner is loaded!"); } - final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); + final Set toBeQueriedHeadPredicates = this.getKnowledgeBasePredicates(); for (final Predicate predicate : toBeQueriedHeadPredicates) { - final PositiveLiteral queryAtom = getQueryAtom(predicate); + final PositiveLiteral queryAtom = this.getQueryAtom(predicate); final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { while (answers.hasNext()) { @@ -516,23 +545,19 @@ public Correctness forEachInference(InferenceAction action) throws IOException { } } - logWarningOnCorrectness(); - return this.correctness; - } - - public Correctness getCorrectness() { + this.logWarningOnCorrectness(this.correctness); return this.correctness; } - private void logWarningOnCorrectness() { - if (this.correctness != 
Correctness.SOUND_AND_COMPLETE) { + private void logWarningOnCorrectness(final Correctness correctness) { + if (correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); } } @Override public void resetReasoner() { - validateNotClosed(); + this.validateNotClosed(); this.reasonerState = ReasonerState.KB_NOT_LOADED; this.vLog.stop(); LOGGER.info("Reasoner has been reset. All inferences computed during reasoning have been discarded."); @@ -552,7 +577,7 @@ public void close() { @Override public void setLogLevel(LogLevel logLevel) { - validateNotClosed(); + this.validateNotClosed(); Validate.notNull(logLevel, "Log level cannot be null!"); this.internalLogLevel = logLevel; this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); @@ -565,33 +590,33 @@ public LogLevel getLogLevel() { @Override public void setLogFile(String filePath) { - validateNotClosed(); + this.validateNotClosed(); this.vLog.setLogFile(filePath); } @Override public boolean isJA() { - return checkAcyclicity(AcyclicityNotion.JA); + return this.checkAcyclicity(AcyclicityNotion.JA); } @Override public boolean isRJA() { - return checkAcyclicity(AcyclicityNotion.RJA); + return this.checkAcyclicity(AcyclicityNotion.RJA); } @Override public boolean isMFA() { - return checkAcyclicity(AcyclicityNotion.MFA); + return this.checkAcyclicity(AcyclicityNotion.MFA); } @Override public boolean isRMFA() { - return checkAcyclicity(AcyclicityNotion.RMFA); + return this.checkAcyclicity(AcyclicityNotion.RMFA); } @Override public boolean isMFC() { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Checking rules acyclicity is not allowed before loading!"); @@ -608,11 +633,11 @@ public boolean isMFC() { @Override public CyclicityResult checkForCycles() { - final boolean acyclic = isJA() || isRJA() || 
isMFA() || isRMFA(); + final boolean acyclic = this.isJA() || this.isRJA() || this.isMFA() || this.isRMFA(); if (acyclic) { return CyclicityResult.ACYCLIC; } else { - final boolean cyclic = isMFC(); + final boolean cyclic = this.isMFC(); if (cyclic) { return CyclicityResult.CYCLIC; } @@ -624,32 +649,32 @@ public CyclicityResult checkForCycles() { public void onStatementsAdded(List statementsAdded) { // TODO more elaborate materialisation state handling - updateReasonerToKnowledgeBaseChanged(); + this.updateReasonerToKnowledgeBaseChanged(); // updateCorrectnessOnStatementsAdded(statementsAdded); - updateCorrectnessOnStatementsAdded(); + this.updateCorrectnessOnStatementsAdded(); } @Override public void onStatementAdded(Statement statementAdded) { // TODO more elaborate materialisation state handling - updateReasonerToKnowledgeBaseChanged(); + this.updateReasonerToKnowledgeBaseChanged(); // updateCorrectnessOnStatementAdded(statementAdded); - updateCorrectnessOnStatementsAdded(); + this.updateCorrectnessOnStatementsAdded(); } @Override public void onStatementRemoved(Statement statementRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); + this.updateReasonerToKnowledgeBaseChanged(); + this.updateCorrectnessOnStatementsRemoved(); } @Override public void onStatementsRemoved(List statementsRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); + this.updateReasonerToKnowledgeBaseChanged(); + this.updateCorrectnessOnStatementsRemoved(); } Set getKnowledgeBasePredicates() { @@ -677,10 +702,10 @@ private PositiveLiteral getQueryAtom(final Predicate predicate) { } private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); + this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { try { - load(); + this.load(); } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 throw new 
RulewerkRuntimeException(e); } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java index ed1aa4f23..5b329f9ba 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java @@ -77,12 +77,12 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep reasoner.reason(); try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeUniversalVariable("x")), false)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateParity1, Expressions.makeUniversalVariable("x")), false)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -103,12 +103,12 @@ public void testAddDataSourceBeforeLoading() throws IOException { reasoner.load(); try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } try (final QueryResultIterator 
queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); } @@ -133,7 +133,7 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } @@ -141,7 +141,7 @@ public void testAddDataSourceAfterLoading() throws IOException { try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, queryResult.getCorrectness()); } } } @@ -164,14 +164,14 @@ public void testAddDataSourceAfterReasoning() throws IOException { try (final QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { - assertEquals(csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); } // there is no fact for predicate Q loaded in the reasoner try (final QueryResultIterator queryResult = reasoner.answerQuery( 
Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { assertFalse(queryResult.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, queryResult.getCorrectness()); } } } @@ -233,8 +233,8 @@ public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOExce reasoner.reason(); try (QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { - final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); - expectedAnswers.addAll(csvFile_c_d_Content); + final Set> expectedAnswers = new HashSet<>(this.csvFile_c1_c2_Content); + expectedAnswers.addAll(this.csvFile_c_d_Content); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); @@ -258,7 +258,7 @@ public void testAddDataSourceAndFactsForPredicateAfterReasoning() throws IOExcep reasoner.reason(); try (QueryResultIterator queryResult = reasoner.answerQuery( Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) { - final Set> expectedAnswers = new HashSet<>(csvFile_c1_c2_Content); + final Set> expectedAnswers = new HashSet<>(this.csvFile_c1_c2_Content); expectedAnswers.add(Arrays.asList(Expressions.makeAbstractConstant("a"))); assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult)); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java index 583b34229..9f66c9b0a 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java +++ 
b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java @@ -45,6 +45,7 @@ import org.semanticweb.rulewerk.core.reasoner.Algorithm; import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; @@ -94,7 +95,7 @@ public void testCorrectnessKBChanges() throws IOException { // there are no facts for Q-1 predicate try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { assertFalse(resultIterator.hasNext()); - assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); } reasoner.reason(); @@ -411,6 +412,98 @@ public void testCorrectnessNoKBChanges() throws IOException { } } + @Test + public void answerQuery_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void answerQuery_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void 
countQueryAnswers_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + final QueryAnswerCount resultIterator = reasoner.countQueryAnswers(ruleHeadQx); + assertEquals(0, resultIterator.getCount()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + + @Test + public void countQueryAnswers_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + final QueryAnswerCount resultIterator = reasoner.countQueryAnswers(ruleBodyPx); + assertEquals(0, resultIterator.getCount()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + + } + + @Test + public void exportQueryAnswersToCsv_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + // TODO mock file or something + String csvFilePath = ".csv"; + Correctness correctness = reasoner.exportQueryAnswersToCsv(ruleHeadQx, csvFilePath, true); + + assertEquals(Correctness.SOUND_AND_COMPLETE, correctness); + } + + } + + @Test + public void exportQueryAnswersToCsv_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + // TODO mock file or something + String csvFilePath = ".csv"; + Correctness correctness = reasoner.exportQueryAnswersToCsv(ruleBodyPx, csvFilePath, true); + + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, correctness); + } + } + @Test public void testMaterialisationIncomplete() throws IOException { final Variable y = Expressions.makeUniversalVariable("y"); 
From 4ed5e084fb368ce329d6a07e5451120fe4779f7c Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 13 Aug 2020 17:54:56 +0200 Subject: [PATCH 0899/1255] make ReasonerStateException message for querying before reason() more explicit --- .../org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index 3340b0bd3..f49bfef7a 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -515,7 +515,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St private void validateBeforeQuerying(final PositiveLiteral query) { this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before reasoner is loaded!"); + throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before Reasoner#reason() was first called!"); } Validate.notNull(query, "Query atom must not be null!"); } From 1feedb724c7eb97edb818d66cb338518cc75ea13 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 18 Aug 2020 17:14:23 +0200 Subject: [PATCH 0900/1255] Remove support for String and Iri from Argument --- .../commands/LoadCommandInterpreter.java | 12 +- .../rulewerk/core/model/api/Argument.java | 128 ++---------------- .../rulewerk/core/model/api/Command.java | 2 - .../rulewerk/core/model/ArgumentTest.java | 50 +++---- .../rulewerk/examples/ExamplesUtils.java | 2 +- .../rulewerk/parser/DirectiveHandler.java | 58 ++------ .../rulewerk/parser/javacc/JavaCCParser.jj | 15 +- .../rulewerk/parser/CommandParserTest.java | 15 +- .../rulewerk/parser/DirectiveHandlerTest.java | 69 ++-------- 9 
files changed, 87 insertions(+), 264 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index b52d078de..dde5e9d18 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -24,6 +24,7 @@ import java.io.FileNotFoundException; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -31,8 +32,15 @@ public class LoadCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - if (command.getArguments().size() == 1 && command.getArguments().get(0).fromString().isPresent()) { - String fileName = command.getArguments().get(0).fromString().get(); + if (command.getArguments().size() == 1) { + String fileName; + try { + fileName = Terms.extractString( + command.getArguments().get(0).fromTerm().orElseThrow(() -> new CommandExecutionException( + "Expected string for file name, but did not find a term."))); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Failed to convert term given for file name to string."); + } try { FileInputStream fileInputStream = new FileInputStream(fileName); RuleParser.parseInto(interpreter.getReasoner().getKnowledgeBase(), fileInputStream); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index 7fe488150..27604edb3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -20,7 +20,6 @@ * #L% */ -import java.net.URI; import java.util.Optional; import java.util.function.Function; @@ -37,16 +36,13 @@ private Argument() { /** * Apply a function to the contained value. * - * @param stringHandler the function to apply to a string argument - * @param iriHandler the function to apply to an IRI * @param termHandler the function to apply to a Term * @param ruleHandler the function to apply to a Rule * @param positiveLiteralHandler the function to apply to a Literal * * @return the value returned by the appropriate handler function */ - public abstract V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public abstract V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler); @@ -76,80 +72,6 @@ protected Optional isEqual(Object other) { return Optional.empty(); } - /** - * Create an argument containing a String. - * - * @param value the string value - * - * @return An argument containing the given string value - */ - public static Argument string(String value) { - return new Argument() { - @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, - Function ruleHandler, - Function positiveLiteralHandler) { - return stringHandler.apply(value); - } - - @Override - public boolean equals(Object other) { - Optional maybeEquals = isEqual(other); - - if (maybeEquals.isPresent()) { - return maybeEquals.get(); - } - - Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false, rule -> false, - positiveLiteral -> false); - } - - @Override - public int hashCode() { - return 41 * value.hashCode(); - } - }; - } - - /** - * Create an argument containing a IRI. 
- * - * @param value the IRI value - * - * @return An argument containing the given IRI value - */ - public static Argument iri(URI value) { - return new Argument() { - @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, - Function ruleHandler, - Function positiveLiteralHandler) { - return iriHandler.apply(value); - } - - @Override - public boolean equals(Object other) { - Optional maybeEquals = isEqual(other); - - if (maybeEquals.isPresent()) { - return maybeEquals.get(); - } - - Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false, rule -> false, - positiveLiteral -> false); - } - - @Override - public int hashCode() { - return 43 * value.hashCode(); - } - }; - } - /** * Create an argument containing a Term. * @@ -160,8 +82,7 @@ public int hashCode() { public static Argument term(Term value) { return new Argument() { @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler) { return termHandler.apply(value); @@ -176,8 +97,7 @@ public boolean equals(Object other) { } Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value), rule -> false, - positiveLiteral -> false); + return otherArgument.apply(term -> term.equals(value), rule -> false, positiveLiteral -> false); } @Override @@ -197,8 +117,7 @@ public int hashCode() { public static Argument rule(Rule value) { return new Argument() { @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler) { return ruleHandler.apply(value); @@ -213,8 +132,7 @@ public boolean equals(Object other) { } Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri 
-> false, term -> false, rule -> rule.equals(value), - positiveLiteral -> false); + return otherArgument.apply(term -> false, rule -> rule.equals(value), positiveLiteral -> false); } @Override @@ -234,8 +152,7 @@ public int hashCode() { public static Argument positiveLiteral(PositiveLiteral value) { return new Argument() { @Override - public V apply(Function stringHandler, - Function iriHandler, Function termHandler, + public V apply(Function termHandler, Function ruleHandler, Function positiveLiteralHandler) { return positiveLiteralHandler.apply(value); @@ -250,7 +167,7 @@ public boolean equals(Object other) { } Argument otherArgument = (Argument) other; - return otherArgument.apply(str -> false, iri -> false, term -> false, rule -> false, + return otherArgument.apply(term -> false, rule -> false, positiveLiteral -> positiveLiteral.equals(value)); } @@ -261,28 +178,6 @@ public int hashCode() { }; } - /** - * Create an optional from a (possible) string value. - * - * @return An optional containing the contained string, or an empty Optional if - * the argument doesn't contain a string. - */ - public Optional fromString() { - return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), - value -> Optional.empty()); - } - - /** - * Create an optional from a (possible) IRI value. - * - * @return An optional containing the contained IRI, or an empty Optional if the - * argument doesn't contain a IRI. - */ - public Optional fromIri() { - return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty(), value -> Optional.empty(), - value -> Optional.empty()); - } - /** * Create an optional from a (possible) Term value. * @@ -290,8 +185,7 @@ public Optional fromIri() { * the argument doesn't contain a Term. 
*/ public Optional fromTerm() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of, value -> Optional.empty(), - value -> Optional.empty()); + return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty()); } /** @@ -301,8 +195,7 @@ public Optional fromTerm() { * the argument doesn't contain a Rule. */ public Optional fromRule() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), Optional::of, - value -> Optional.empty()); + return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); } /** @@ -312,7 +205,6 @@ public Optional fromRule() { * Optional if the argument doesn't contain a PositiveLitreal. */ public Optional fromPositiveLiteral() { - return this.apply(value -> Optional.empty(), value -> Optional.empty(), value -> Optional.empty(), - value -> Optional.empty(), Optional::of); + return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 1634ae277..3af723089 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -76,8 +76,6 @@ public String getSyntacticRepresentation() { .append(Serializer.getString(rule.getBody())); } else if (argument.fromPositiveLiteral().isPresent()) { result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); - } else if (argument.fromString().isPresent()) { - result.append(Serializer.getString(argument.fromString().get())); } else { throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java index bcec475d1..76efe55d4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java @@ -21,65 +21,67 @@ */ import static org.junit.Assert.*; -import java.net.URI; - import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class ArgumentTest { - private static final String STRING = "src/test/resources/facts.rls"; - private static final URI IRI = URI.create("https://example.org"); - private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + private static final Term TERM = Expressions.makeDatatypeConstant("some string", + PrefixDeclarationRegistry.XSD_STRING); + private static final PositiveLiteral LITERAL = Expressions.makePositiveLiteral("p", TERM); + private static final Rule RULE = Expressions.makeRule(LITERAL, LITERAL); - private static final Argument STRING_ARGUMENT = Argument.string(STRING); - private static final Argument IRI_ARGUMENT = Argument.iri(IRI); private static final Argument TERM_ARGUMENT = Argument.term(TERM); + private static final Argument LITERAL_ARGUMENT = Argument.positiveLiteral(LITERAL); + private static final Argument RULE_ARGUMENT = Argument.rule(RULE); @Test public void equals_null_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(null)); - assertFalse(IRI_ARGUMENT.equals(null)); + assertFalse(LITERAL_ARGUMENT.equals(null)); + assertFalse(RULE_ARGUMENT.equals(null)); assertFalse(TERM_ARGUMENT.equals(null)); } @Test public void equals_self_returnsTrue() { 
- assertTrue(STRING_ARGUMENT.equals(STRING_ARGUMENT)); - assertTrue(IRI_ARGUMENT.equals(IRI_ARGUMENT)); + assertTrue(RULE_ARGUMENT.equals(RULE_ARGUMENT)); + assertTrue(LITERAL_ARGUMENT.equals(LITERAL_ARGUMENT)); assertTrue(TERM_ARGUMENT.equals(TERM_ARGUMENT)); } @Test public void equals_equal_returnsTrue() { - assertTrue(STRING_ARGUMENT.equals(Argument.string(STRING))); - assertTrue(IRI_ARGUMENT.equals(Argument.iri(IRI))); + assertTrue(RULE_ARGUMENT.equals(Argument.rule(RULE))); + assertTrue(LITERAL_ARGUMENT.equals(Argument.positiveLiteral(LITERAL))); assertTrue(TERM_ARGUMENT.equals(Argument.term(TERM))); } @Test public void equals_notEqualButSameType_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(Argument.string(STRING + "test"))); - assertFalse(IRI_ARGUMENT.equals(Argument.iri(URI.create("https://example.com")))); + assertFalse(RULE_ARGUMENT.equals(Argument.rule(Expressions.makeRule(LITERAL, LITERAL, LITERAL)))); + assertFalse(LITERAL_ARGUMENT.equals(Argument.positiveLiteral(Expressions.makePositiveLiteral("q", TERM)))); assertFalse(TERM_ARGUMENT - .equals(Argument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); + .equals(Argument.term(Expressions.makeDatatypeConstant("another string", "https://example.com")))); } @Test public void equals_differentType_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals(IRI_ARGUMENT)); - assertFalse(STRING_ARGUMENT.equals(TERM_ARGUMENT)); - assertFalse(IRI_ARGUMENT.equals(STRING_ARGUMENT)); - assertFalse(IRI_ARGUMENT.equals(TERM_ARGUMENT)); - assertFalse(TERM_ARGUMENT.equals(STRING_ARGUMENT)); - assertFalse(TERM_ARGUMENT.equals(IRI_ARGUMENT)); + assertFalse(RULE_ARGUMENT.equals(LITERAL_ARGUMENT)); + assertFalse(RULE_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(LITERAL_ARGUMENT.equals(RULE_ARGUMENT)); + assertFalse(LITERAL_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(RULE_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(LITERAL_ARGUMENT)); } @Test public void 
equals_String_returnsFalse() { - assertFalse(STRING_ARGUMENT.equals((Object) "test")); - assertFalse(IRI_ARGUMENT.equals((Object) "test")); + assertFalse(RULE_ARGUMENT.equals((Object) "test")); + assertFalse(LITERAL_ARGUMENT.equals((Object) "test")); assertFalse(TERM_ARGUMENT.equals((Object) "test")); } } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index a0801f415..87745d975 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -73,7 +73,7 @@ public static void configureLogging() { final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: - consoleAppender.setThreshold(Level.INFO); + consoleAppender.setThreshold(Level.ERROR); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 1be48eba7..2a8de3aa9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -21,15 +21,13 @@ */ import java.io.File; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; import java.nio.file.InvalidPathException; import java.util.List; import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; 
import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; @@ -54,8 +52,7 @@ public interface DirectiveHandler { * directive, or the number of arguments is invalid. * @return a {@code T} instance corresponding to the given arguments. */ - public T handleDirective(List arguments, final SubParserFactory subParserFactory) - throws ParsingException; + public T handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** * Validate the provided number of arguments to the directive statement. @@ -87,8 +84,12 @@ public static void validateNumberOfArguments(final List arguments, fin */ public static String validateStringArgument(final Argument argument, final String description) throws ParsingException { - return argument.fromString() - .orElseThrow(() -> new ParsingException("description \"" + argument + "\" is not a string.")); + try { + return Terms.extractString(argument.fromTerm().orElseThrow( + () -> new ParsingException("Expected string for " + description + ", but did not find a term."))); + } catch (IllegalArgumentException e) { + throw new ParsingException("Failed to convert term given for " + description + " to string."); + } } /** @@ -110,50 +111,12 @@ public static File validateFilenameArgument(final Argument argument, final Strin // we don't care about the actual path, just that there is one. file.toPath(); } catch (InvalidPathException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a valid file path.", e); + throw new ParsingException(description + "\"" + fileName + "\" is not a valid file path.", e); } return file; } - /** - * Validate that the provided argument is an IRI. - * - * @param argument the argument to validate - * @param description a description of the argument, used in constructing the - * error message. - * - * @throws ParsingException when the given argument is not an IRI. - * - * @return the contained IRI. 
- */ - public static URI validateIriArgument(final Argument argument, final String description) - throws ParsingException { - return argument.fromIri() - .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not an IRI.")); - } - - /** - * Validate that the provided argument is a {@link URL}. - * - * @param argument the argument to validate - * @param description a description of the argument, used in constructing the - * error message. - * - * @throws ParsingException when the given argument is not a valid {@link URL}. - * - * @return the {@link URL} corresponding to the contained IRI. - */ - public static URL validateUrlArgument(final Argument argument, final String description) - throws ParsingException { - URI iri = DirectiveHandler.validateIriArgument(argument, description); - try { - return iri.toURL(); - } catch (MalformedURLException e) { - throw new ParsingException(description + "\"" + argument + "\" is not a valid URL.", e); - } - } - /** * Validate that the provided argument is a {@link Term}. * @@ -165,8 +128,7 @@ public static URL validateUrlArgument(final Argument argument, final String desc * * @return the contained {@link Term}. 
*/ - public static Term validateTermArgument(final Argument argument, final String description) - throws ParsingException { + public static Term validateTermArgument(final Argument argument, final String description) throws ParsingException { return argument.fromTerm() .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not a Term.")); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 59e2c3f85..ef20b419f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -57,11 +57,10 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.script.Argument; -import org.semanticweb.rulewerk.core.script.Command; - import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class JavaCCParser extends JavaCCParserBase @@ -455,16 +454,6 @@ LinkedList< Argument > Arguments() throws PrefixDeclarationException : { } { ( LOOKAHEAD(ruleNoDot()) rule = ruleNoDot() { argument = Argument.rule(rule); } | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = positiveLiteral(FormulaContext.HEAD) { argument = Argument.positiveLiteral(positiveLiteral); } - | LOOKAHEAD(String()) str = String() { argument = Argument.string(str); } - | LOOKAHEAD(absoluteIri()) str = absoluteIri() { - URI url; - try { - url = new URI(str); - } catch (URISyntaxException e) { - throw 
makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); - } - argument = Argument.iri(url); - } | t = term(FormulaContext.HEAD) { argument = Argument.term(t); } ) [rest = Arguments()] { rest.addFirst(argument); diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java index f81d83088..027cff739 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -23,6 +23,7 @@ import static org.junit.Assert.*; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; @@ -35,9 +36,19 @@ public void parseCommand() throws ParsingException { assertEquals("query", command.getName()); assertEquals(5, command.getArguments().size()); assertTrue(command.getArguments().get(0).fromRule().isPresent()); - assertTrue(command.getArguments().get(1).fromString().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); assertTrue(command.getArguments().get(2).fromTerm().isPresent()); assertTrue(command.getArguments().get(3).fromPositiveLiteral().isPresent()); - assertTrue(command.getArguments().get(4).fromIri().isPresent()); + assertTrue(command.getArguments().get(4).fromTerm().isPresent()); } + +// @Test +// public void parseCommandTest() throws ParsingException { +// String input = "@myprefix wdqs: ."; +//// String input = "@mysource diseaseId[2]: sparql(wdqs:sparql, \"disease,doid\", \"?disease wdt:P699 ?doid .\") ."; +// Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); +// for (Argument argument : command.getArguments()) { +// System.out.println("-"); +// } +// } } diff --git 
a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 725d54dc1..66f89562a 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -22,87 +22,48 @@ import static org.junit.Assert.*; import java.io.File; -import java.net.MalformedURLException; -import java.net.URI; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class DirectiveHandlerTest { private static final String STRING = "src/test/resources/facts.rls"; - private static final URI IRI = URI.create("https://example.org"); - private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + private static final Term STRINGTERM = Expressions.makeDatatypeConstant(STRING, + PrefixDeclarationRegistry.XSD_STRING); + private static final Term INTTERM = Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INT); - private static final Argument STRING_ARGUMENT = Argument.string(STRING); - private static final Argument IRI_ARGUMENT = Argument.iri(IRI); - private static final Argument TERM_ARGUMENT = Argument.term(TERM); + private static final Argument TERM_STRING_ARGUMENT = Argument.term(STRINGTERM); + private static final Argument TERM_INT_ARGUMENT = Argument.term(INTTERM); @Test public void validateStringArgument_stringArgument_succeeds() throws ParsingException { - assertEquals(STRING, DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument")); + assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_STRING_ARGUMENT, "string argument")); } - - 
@Test(expected = ParsingException.class) - public void validateStringArgument_iriArgument_throws() throws ParsingException { - DirectiveHandler.validateStringArgument(IRI_ARGUMENT, "string argument"); - } - - @Test(expected = ParsingException.class) - public void validateStringArgument_termArgument_throws() throws ParsingException { - DirectiveHandler.validateStringArgument(TERM_ARGUMENT, "string argument"); - } - - @Test - public void validateIriArgument_iriArgument_succeeds() throws ParsingException { - assertEquals(IRI, DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument")); - } - - @Test(expected = ParsingException.class) - public void validateIriArgument_StringArgument_throws() throws ParsingException { - DirectiveHandler.validateIriArgument(STRING_ARGUMENT, "iri argument"); - } - + @Test(expected = ParsingException.class) - public void validateIriArgument_termArgument_throws() throws ParsingException { - DirectiveHandler.validateIriArgument(TERM_ARGUMENT, "iri argument"); + public void validateStringArgument_stringArgument_throws() throws ParsingException { + assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_INT_ARGUMENT, "string argument")); } @Test public void validateTermArgument_termArgument_succeeds() throws ParsingException { - assertEquals(TERM, DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument")); - } - - @Test(expected = ParsingException.class) - public void validateTermArgument_stringArgument_throws() throws ParsingException { - DirectiveHandler.validateTermArgument(STRING_ARGUMENT, "term argument"); - } - - @Test(expected = ParsingException.class) - public void validateTermArgument_iriArgument_throws() throws ParsingException { - DirectiveHandler.validateTermArgument(IRI_ARGUMENT, "term argument"); + assertEquals(STRINGTERM, DirectiveHandler.validateTermArgument(TERM_STRING_ARGUMENT, "term argument")); } @Test public void validateFilenameArgument_filename_succeeds() throws ParsingException { - 
assertEquals(new File(STRING), DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, "filename argument")); + assertEquals(new File(STRING), + DirectiveHandler.validateFilenameArgument(TERM_STRING_ARGUMENT, "filename argument")); } @Test public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { - DirectiveHandler.validateFilenameArgument(Argument.string(STRING + "-nonexistant"), + DirectiveHandler.validateFilenameArgument(Argument + .term(Expressions.makeDatatypeConstant(STRING + "-nonexistent", PrefixDeclarationRegistry.XSD_STRING)), "filename argument"); } - @Test - public void validateUrlArgument_url_succeeds() throws ParsingException, MalformedURLException { - assertEquals(IRI.toURL(), DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument")); - } - - @Test(expected = ParsingException.class) - public void validateUrlArgument_invalidUrl_throws() throws ParsingException { - DirectiveHandler.validateUrlArgument(Argument.iri(URI.create("example://test")), "url argument"); - } - } From aca91129a212d83ec6ef3343995af92990ee841b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 18 Aug 2020 17:14:47 +0200 Subject: [PATCH 0901/1255] improved output --- .../rulewerk/commands/ReasonCommandInterpreter.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 13753e8f4..6ecfd8944 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -34,6 +34,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("This command supports no arguments."); } + interpreter.getOut().println("Loading and 
materializing inferences ..."); + Timer timer = new Timer("reasoning"); timer.start(); try { @@ -42,8 +44,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getOut() - .println("Loading and materialization finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.getOut().println("... finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); } @Override @@ -55,5 +56,5 @@ public String getHelp(String commandName) { public String getSynopsis() { return "load data and compute conclusions from knowledge base"; } - + } From 49cabe7dfa94c9141a300c506a0d908c53c5b6e9 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 18 Aug 2020 18:07:00 +0200 Subject: [PATCH 0902/1255] support source and prefix like command arguments --- .../rulewerk/core/model/api/Argument.java | 17 +++++++++ .../rulewerk/parser/javacc/JavaCCParser.jj | 23 +++++++----- .../rulewerk/parser/CommandParserTest.java | 35 +++++++++++++------ 3 files changed, 57 insertions(+), 18 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index 27604edb3..dfbd0c771 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -23,6 +23,8 @@ import java.util.Optional; import java.util.function.Function; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + /** * A tagged union representing the possible types allowed to appear as arguments * in commands and parser directives. 
@@ -104,6 +106,11 @@ public boolean equals(Object other) { public int hashCode() { return 47 * value.hashCode(); } + + @Override + public String toString() { + return value.toString(); + } }; } @@ -139,6 +146,11 @@ public boolean equals(Object other) { public int hashCode() { return 53 * value.hashCode(); } + + @Override + public String toString() { + return value.toString(); + } }; } @@ -175,6 +187,11 @@ public boolean equals(Object other) { public int hashCode() { return 59 * value.hashCode(); } + + @Override + public String toString() { + return value.toString(); + } }; } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index ef20b419f..e73b4b222 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -160,16 +160,23 @@ KnowledgeBase directive() throws PrefixDeclarationException : { } Command command() throws PrefixDeclarationException : { - Token name; - List< Argument > arguments; + Token name, pn, arity; + LinkedList< Argument > arguments; + String predicateName; + } { name = < CUSTOM_DIRECTIVE > - ( arguments = Arguments() < DOT > { - return new Command(name.image,arguments); - } | - < DOT > { - return new Command(name.image, new LinkedList< Argument >()); - } ) + ( LOOKAHEAD(predicateName() < ARITY >) predicateName = predicateName() arity = < ARITY > < COLON > arguments = Arguments() < DOT > { + arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING))); + return new Command(name.image,arguments); + } + | arguments = Arguments() < DOT > { return new Command(name.image,arguments); } + | pn = < PNAME_NS > arguments = Arguments() < DOT > { + 
arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(pn.image, PrefixDeclarationRegistry.XSD_STRING))); + return new Command(name.image,arguments); + } + | < DOT > { return new Command(name.image, new LinkedList< Argument >()); } + ) } void statement() throws PrefixDeclarationException : { diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java index 027cff739..3381f05cc 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -22,9 +22,12 @@ import static org.junit.Assert.*; +import java.net.URI; +import java.net.URISyntaxException; + import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; public class CommandParserTest { @@ -41,14 +44,26 @@ public void parseCommand() throws ParsingException { assertTrue(command.getArguments().get(3).fromPositiveLiteral().isPresent()); assertTrue(command.getArguments().get(4).fromTerm().isPresent()); } + + @Test + public void parsePrefix() throws ParsingException, URISyntaxException { + String input = "@myprefix wdqs: ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + assertEquals("wdqs:", Terms.extractString(command.getArguments().get(0).fromTerm().get())); + assertEquals(new URI("https://query.wikidata.org/"), Terms.extractIri(command.getArguments().get(1).fromTerm().get())); + } -// @Test -// public void parseCommandTest() 
throws ParsingException { -// String input = "@myprefix wdqs: ."; -//// String input = "@mysource diseaseId[2]: sparql(wdqs:sparql, \"disease,doid\", \"?disease wdt:P699 ?doid .\") ."; -// Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); -// for (Argument argument : command.getArguments()) { -// System.out.println("-"); -// } -// } + @Test + public void parseSourceDeclaration() throws ParsingException, URISyntaxException { + String input = "@mysource diseaseId[2]: 123 ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + assertEquals("diseaseId[2]:", Terms.extractString(command.getArguments().get(0).fromTerm().get())); + assertEquals(123, Terms.extractInt(command.getArguments().get(1).fromTerm().get())); + } } From 171af2fe511815e42a4cf8dbebc4c0efe706cc60 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 01:13:00 +0200 Subject: [PATCH 0903/1255] added shell code --- pom.xml | 2 + rulewerk-client/pom.xml | 32 +++++- .../rulewerk/client/shell/CommandReader.java | 90 ++++++++++++++++ .../client/shell/DefaultConfiguration.java | 60 +++++++++++ .../rulewerk/client/shell/PromptProvider.java | 28 +++++ .../client/shell/RulewerkApplication.java | 40 +++++++ .../rulewerk/client/shell/Shell.java | 102 ++++++++++++++++++ .../commands/ExitCommandInterpreter.java | 66 ++++++++++++ 8 files changed, 415 insertions(+), 5 deletions(-) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java create mode 100644 
rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java diff --git a/pom.xml b/pom.xml index 17fe39ba5..bd2ca0a0d 100644 --- a/pom.xml +++ b/pom.xml @@ -87,6 +87,8 @@ 1.3.1 4.0.4 3.2.0 + 3.16.0 + 1.18 diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 64f043d72..98bca4d59 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -31,11 +31,16 @@ rulewerk-parser ${project.version} - - ${project.groupId} - rulewerk-vlog - ${project.version} - + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-commands + ${project.version} + org.slf4j slf4j-log4j12 @@ -51,6 +56,23 @@ maven-shade-plugin ${shade.version} + + + + org.jline + jline + ${jline.version} + + + org.fusesource.jansi + jansi + ${jansi.version} + + + org.jline + jline-terminal-jansi + ${jline.version} + diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java new file mode 100644 index 000000000..ea34fc323 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -0,0 +1,90 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.jline.utils.AttributedString; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public class CommandReader { + + public CommandReader(final LineReader lineReader, final PromptProvider promptProvider) { + super(); + this.lineReader = lineReader; + this.promptProvider = promptProvider; + } + + private final LineReader lineReader; + + private final PromptProvider promptProvider; + + public Command readCommand() { + final String readLine; + try { + final AttributedString prompt = this.promptProvider.getPrompt(); + readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); + + } catch (final UserInterruptException e) { + if (e.getPartialLine().isEmpty()) { + // Exit request from user CTRL+C + return ExitCommandInterpreter.EXIT_COMMAND; + } else { + // TODO maybe create empty command + return null; + } + } + // TODO can readLIne be null? + + // TODO does it trim trailing spaces? + if (ExitCommandName.isExitCommand(readLine)) { + return ExitCommandInterpreter.EXIT_COMMAND; + } + + try { + return RuleParser.parseCommand(readLine); + } catch (final ParsingException e) { + // FIXME do I need to flush terminal? 
+ // TODO improve error message + this.lineReader.getTerminal().writer().println("Command cannot be parsed: " + e.getMessage()); + // return Input.EMPTY; + // TODO maybe create empty command + return null; + } + } + +// /** +// * Sanitize the buffer input given the customizations applied to the JLine +// * parser (e.g. support for line continuations, etc.) +// */ +// static List sanitizeInput(List words) { +// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by +// // backslash continuation +// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string +// .collect(Collectors.toList()); +// return words; +// } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java new file mode 100644 index 000000000..2b4085ac5 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -0,0 +1,60 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.reader.LineReaderBuilder; +import org.jline.terminal.Terminal; +import org.jline.terminal.TerminalBuilder; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; + +public final class DefaultConfiguration { + + private DefaultConfiguration() { + } + + public static PromptProvider buildPromptProvider() { + return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); + } + + public static LineReader buildLineReader(final Terminal terminal) { + final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) + .appName("Rulewerk Shell") + // .expander(expander()) + // .completer(buildCompleter()) + // .history(buildHistory()) + // .highlighter(buildHighlighter()) + ; + + final LineReader lineReader = lineReaderBuilder.build(); + lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than + // inserting a tab + return lineReader; + } + + public static Terminal buildTerminal() throws IOException { + return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java new file mode 100644 index 000000000..ff5fd6ea4 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java @@ -0,0 +1,28 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.jline.utils.AttributedString; + +public interface PromptProvider { + + AttributedString getPrompt(); +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java new file mode 100644 index 000000000..4cee43296 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -0,0 +1,40 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; + +public class RulewerkApplication { + + public static void main(final String[] args) throws IOException { + final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Shell shell = new Shell(terminal); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider); + + shell.run(commandReader); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java new file mode 100644 index 000000000..41f309f20 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -0,0 +1,102 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.PrintStream; + +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.slf4j.Logger; + +public class Shell { + + private final Terminal terminal; + + private final Interpreter interpreter; + + public Shell(final Terminal terminal) { + this.terminal = terminal; + this.interpreter = this.initializeInterpreter(); + } + + private Interpreter initializeInterpreter() { + // FIXME connect terminal writer +// final PrintStream out = this.terminal.writer().; + final PrintStream out = System.out; + + // FIXME connect logger; + final Logger logger = null; + // TODO reasoner initial KB from args + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final Interpreter interpreter = new Interpreter(reasoner, out, logger); + + for (final ExitCommandName exitCommandName : ExitCommandName.values()) { + interpreter.registerCommandInterpreter(exitCommandName.toString(), new ExitCommandInterpreter()); + } + + return interpreter; + } + + public void run(final CommandReader commandReader) { + while (true) { + final Command command; + try { + command = commandReader.readCommand(); + } catch (final Exception e) { + // TODO: handle exception + continue; + } + + if (command != null) { + try { + this.interpreter.runCommand(command); + } catch (final CommandExecutionException e) { + // TODO: handle exception + continue; + } + + if 
(ExitCommandName.isExitCommand(command.getName())) { + break; + } + } + } + } + +// @Override +// public void handleResult(final Object result) { +// this.terminal.writer().println(result); +// this.terminal.writer().flush(); +// } + +// @Override +// public void handleResult(final AttributedCharSequence result) { +// this.terminal.writer().println(result.toAnsi(this.terminal)); +// this.terminal.writer().flush(); +// } +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java new file mode 100644 index 000000000..13eb81671 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -0,0 +1,66 @@ +package org.semanticweb.rulewerk.client.shell.commands; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.ArrayList; + +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.core.model.api.Command; + +public class ExitCommandInterpreter implements CommandInterpreter { + + public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>()); + + public static enum ExitCommandName + { + exit, quit; + + public static boolean isExitCommand(final String commandName) { + for(final ExitCommandName name: values()) { + if (name.toString().equals(commandName)) { + return true; + } + } + return false; + } + } + + @Override + public String getHelp(final String commandName) { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getSynopsis() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void run(final Command command, final org.semanticweb.rulewerk.commands.Interpreter interpreter) + throws CommandExecutionException { + // TODO Auto-generated method stub + interpreter.getOut().println("Quiting rulewerk."); + } + +} From 9cc7dd4c8d55f9681d937b238a1cbe9045c88160 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 09:43:39 +0200 Subject: [PATCH 0904/1255] Commands to set sources and prefixes Command parsing needs to use KB prefixes for this to work --- .../rulewerk/commands/Interpreter.java | 62 +++++++++++++- .../commands/SetPrefixCommandInterpreter.java | 53 ++++++++++++ .../commands/SetSourceCommandInterpreter.java | 84 +++++++++++++++++++ .../model/api/PrefixDeclarationRegistry.java | 7 ++ .../rulewerk/core/model/api/Terms.java | 39 +++++---- .../AbstractPrefixDeclarationRegistry.java | 5 ++ .../rulewerk/core/reasoner/KnowledgeBase.java | 12 ++- 7 files changed, 243 insertions(+), 19 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java create mode 
100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 959a0d6d6..404e98e6d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -25,21 +25,23 @@ import java.util.List; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.slf4j.Logger; +import org.semanticweb.rulewerk.parser.ParserConfiguration; public class Interpreter { final Reasoner reasoner; final PrintStream out; - final Logger logger; + final ParserConfiguration parserConfiguration; final HashMap commandInterpreters = new HashMap<>(); - public Interpreter(Reasoner reasoner, PrintStream out, Logger logger) { + public Interpreter(Reasoner reasoner, PrintStream out, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; this.out = out; - this.logger = logger; + this.parserConfiguration = parserConfiguration; registerDefaultCommandInterpreters(); } @@ -68,6 +70,10 @@ public void runCommand(Command command) throws CommandExecutionException { public Reasoner getReasoner() { return reasoner; } + + public ParserConfiguration getParserConfiguration() { + return parserConfiguration; + } public PrintStream getOut() { return out; @@ -79,6 +85,54 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("query", new QueryCommandInterpreter()); registerCommandInterpreter("reason", new ReasonCommandInterpreter()); registerCommandInterpreter("load", new LoadCommandInterpreter()); + registerCommandInterpreter("setprefix", new 
SetPrefixCommandInterpreter()); + registerCommandInterpreter("setsource", new SetSourceCommandInterpreter()); + } + + /** + * Validate that the correct number of arguments was passed to a command. + * + * @param command Command to validate + * @param number expected number of parameters + * @throws CommandExecutionException if the number is not correct + */ + public static void validateArgumentCount(Command command, int number) throws CommandExecutionException { + if (command.getArguments().size() != number) { + throw new CommandExecutionException("This command requires exactly " + number + " argument(s), but " + + command.getArguments().size() + " were given."); + } + } + + private static CommandExecutionException getArgumentTypeError(int index, String expectedType, + String parameterName) { + return new CommandExecutionException( + "Argument at position " + index + " needs to be of type " + expectedType + " (" + parameterName + ")."); + } + + public static String extractStringArgument(Command command, int index, String parameterName) + throws CommandExecutionException { + try { + return Terms.extractString(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); + } catch (IllegalArgumentException e) { + throw getArgumentTypeError(index, "string", parameterName); + } + } + + public static String extractNameArgument(Command command, int index, String parameterName) + throws CommandExecutionException { + try { + return Terms.extractName(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); + } catch (IllegalArgumentException e) { + throw getArgumentTypeError(index, "constant", parameterName); + } + } + + public static PositiveLiteral extractPositiveLiteralArgument(Command command, int index, String parameterName) + throws CommandExecutionException { + return command.getArguments().get(index).fromPositiveLiteral() + .orElseThrow(() 
-> getArgumentTypeError(index, "literal", parameterName)); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java new file mode 100644 index 000000000..7b9427d02 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -0,0 +1,53 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; + +public class SetPrefixCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); + String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); + + interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); + try { + interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, + prefixIri); + } catch (PrefixDeclarationException e) { // practically impossible + throw new CommandExecutionException("Setting prefix failed: " + e.getMessage()); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " : ."; + } + + @Override + public String getSynopsis() { + return "set a prefix to abbreviate long IRIs (only affects future inputs)"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java new file mode 100644 index 000000000..416084b38 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java @@ -0,0 +1,84 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class SetSourceCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + "source declaration"); + + String predicateName; + int arity; + try { + int openBracket = predicateDeclaration.indexOf('['); + int closeBracket = predicateDeclaration.indexOf(']'); + predicateName = predicateDeclaration.substring(0, openBracket); + String arityString = predicateDeclaration.substring(openBracket + 1, closeBracket); + arity = Integer.parseInt(arityString); + } catch (IndexOutOfBoundsException | NumberFormatException e) { + throw new CommandExecutionException( + "Predicate declaration must have the format \"predicateName[number]\" but was " + + predicateDeclaration); + } + Predicate predicate = Expressions.makePredicate(predicateName, arity); + + 
DataSource dataSource; + try { + dataSource = interpreter.getParserConfiguration() + .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); + } catch (ParsingException e) { + throw new CommandExecutionException("Could not parse source declartion: " + e.getMessage()); + } + + if (dataSource.getRequiredArity().isPresent()) { + Integer requiredArity = dataSource.getRequiredArity().get(); + if (arity != requiredArity) { + throw new CommandExecutionException( + "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); + } + } + + interpreter.getReasoner().getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " []: ."; + } + + @Override + public String getSynopsis() { + return "define an external data source for a predicate"; + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 49cc7abe3..a3ac69b9c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -85,6 +85,13 @@ public interface PrefixDeclarationRegistry extends Iterableprefixed * name into an absolute IRI. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java index ba964c21c..8e8e05ac2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java @@ -132,6 +132,23 @@ public static String extractString(Term term) { "Term " + term.toString() + " is not a datatype constant of type xsd:string."); } + /** + * Returns the name of an abstract term, and throws an exception for all other + * cases. + * + * @param term the term from which the name is to be extracted + * @return extracted name + * @throws IllegalArgumentException if the given term is not an abstract + * constant + */ + public static String extractName(Term term) { + if (term.getType() == TermType.ABSTRACT_CONSTANT) { + return term.getName(); + } else { + throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); + } + } + /** * Returns the IRI representation of an abstract term, and throws an exception * for all other cases. 
@@ -142,14 +159,11 @@ public static String extractString(Term term) { * constant or cannot be parsed as an IRI */ public static URI extractIri(Term term) { - if (term.getType() == TermType.ABSTRACT_CONSTANT) { - try { - return new URI(term.getName()); - } catch (URISyntaxException e) { - throw new IllegalArgumentException(e); - } + try { + return new URI(extractName(term)); + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); } - throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); } /** @@ -162,14 +176,11 @@ public static URI extractIri(Term term) { * constant or cannot be parsed as a URL */ public static URL extractUrl(Term term) { - if (term.getType() == TermType.ABSTRACT_CONSTANT) { - try { - return new URL(term.getName()); - } catch (MalformedURLException e) { - throw new IllegalArgumentException(e); - } + try { + return new URL(extractName(term)); + } catch (MalformedURLException e) { + throw new IllegalArgumentException(e); } - throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 2bb4e72a7..9f584fcec 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -64,6 +64,11 @@ public String getPrefixIri(String prefixName) throws PrefixDeclarationException return prefixes.get(prefixName); } + + @Override + public void unsetPrefix(String prefixName) { + prefixes.remove(prefixName); + } @Override public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { diff --git 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index ad03ba16e..7cfe63c3a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -499,6 +499,15 @@ public void mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarationR this.prefixDeclarationRegistry.mergePrefixDeclarations(prefixDeclarationRegistry); } + /** + * Returns the {@link PrefixDeclarationRegistry} used by this knowledge base. + * + * @return registry for prefix declarations + */ + public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { + return this.prefixDeclarationRegistry; + } + /** * Return the base IRI. * @@ -564,7 +573,8 @@ public String unresolveAbsoluteIri(String iri) { * * @param stream the {@link OutputStream} to serialise to. * - * @throws IOException if an I/O error occurs while writing to given output stream + * @throws IOException if an I/O error occurs while writing to given output + * stream */ public void writeKnowledgeBase(OutputStream stream) throws IOException { stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); From 992e64f1f8d9c8863c8f0d715f656d99440cc6cc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 09:44:05 +0200 Subject: [PATCH 0905/1255] Improved error messages --- .../commands/AssertCommandInterpreter.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 0a4d0075e..759f5a059 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -30,8 +30,7 @@ public class AssertCommandInterpreter implements CommandInterpreter { @Override - public void run(Command command, Interpreter interpreter) - throws CommandExecutionException { + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { for (Argument argument : command.getArguments()) { if (argument.fromPositiveLiteral().isPresent()) { @@ -46,16 +45,17 @@ public void run(Command command, Interpreter interpreter) } else if (argument.fromRule().isPresent()) { interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); } else { - throw new CommandExecutionException("Only facts and rules can be asserted."); + throw new CommandExecutionException( + "Only facts and rules can be asserted. Encountered " + argument.toString()); } } } - + @Override public String getHelp(String commandName) { - return "Usage: @" + commandName + " ()+ .\n" + - " fact or rule: statement(s) to be added to the knowledge base\n" + - "Reasoning needs to be invoked after finishing addition of statements."; + return "Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be added to the knowledge base\n" + + "Reasoning needs to be invoked after finishing addition of statements."; } @Override From bdf5ba76d017745470c51337fca7d230f32ad967 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 09:48:38 +0200 Subject: [PATCH 0906/1255] Update Interpreter construction --- .../java/org/semanticweb/rulewerk/client/shell/Shell.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 41f309f20..ef7c49ef4 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -30,8 +30,9 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -import org.slf4j.Logger; public class Shell { @@ -49,12 +50,11 @@ private Interpreter initializeInterpreter() { // final PrintStream out = this.terminal.writer().; final PrintStream out = System.out; - // FIXME connect logger; - final Logger logger = null; // TODO reasoner initial KB from args final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final Interpreter interpreter = new Interpreter(reasoner, out, logger); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { interpreter.registerCommandInterpreter(exitCommandName.toString(), new ExitCommandInterpreter()); From 4d865026ee5c601c5d5b82d46ee1fd94dacb8bf8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 10:49:00 +0200 Subject: [PATCH 0907/1255] Improve command handling in shell --- .../rulewerk/client/shell/CommandReader.java | 52 ++++++++------ .../client/shell/RulewerkApplication.java | 28 +++++++- .../rulewerk/client/shell/Shell.java | 71 ++++--------------- .../commands/ExitCommandInterpreter.java | 16 +++-- .../rulewerk/commands/Interpreter.java | 34 +++++++++ 5 files changed, 116 insertions(+), 85 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index ea34fc323..9877ea6fa 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -24,53 +24,63 @@ import org.jline.reader.UserInterruptException; import org.jline.utils.AttributedString; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; +import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; public class CommandReader { - public CommandReader(final LineReader lineReader, final PromptProvider promptProvider) { - super(); + private final LineReader lineReader; + private final PromptProvider promptProvider; + private final Interpreter interpreter; + + public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, + final Interpreter interpreter) { + super(); // FIXME: there is no superclass? this.lineReader = lineReader; this.promptProvider = promptProvider; + this.interpreter = interpreter; } - private final LineReader lineReader; - - private final PromptProvider promptProvider; - + /** + * Reads a command from the prompt and returns a corresponding {@link Command} + * object. If no command should be executed, null is returned. Some effort is + * made to interpret mistyped commands by adding @ and . before and after the + * input, if forgotten. 
+ * + * @return command or null + */ public Command readCommand() { - final String readLine; + String readLine; try { final AttributedString prompt = this.promptProvider.getPrompt(); readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); - } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // Exit request from user CTRL+C return ExitCommandInterpreter.EXIT_COMMAND; } else { - // TODO maybe create empty command - return null; + return null; // used as empty command } } - // TODO can readLIne be null? - // TODO does it trim trailing spaces? - if (ExitCommandName.isExitCommand(readLine)) { - return ExitCommandInterpreter.EXIT_COMMAND; + readLine = readLine.trim(); + if ("".equals(readLine)) { + return null; + } + if (readLine.charAt(0) != '@') { + readLine = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + readLine = readLine + "."; } try { - return RuleParser.parseCommand(readLine); + return interpreter.parseCommand(readLine); } catch (final ParsingException e) { // FIXME do I need to flush terminal? 
- // TODO improve error message - this.lineReader.getTerminal().writer().println("Command cannot be parsed: " + e.getMessage()); - // return Input.EMPTY; - // TODO maybe create empty command + this.lineReader.getTerminal().writer() + .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); return null; } } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index 4cee43296..b01085223 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -21,20 +21,44 @@ */ import java.io.IOException; +import java.io.PrintStream; import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class RulewerkApplication { public static void main(final String[] args) throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Shell shell = new Shell(terminal); + final Interpreter interpreter = initializeInterpreter(terminal); + + final Shell shell = new Shell(interpreter); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, promptProvider); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); shell.run(commandReader); } + private static Interpreter 
initializeInterpreter(Terminal terminal) { + // FIXME connect terminal writer +// final PrintStream out = terminal.writer().; + final PrintStream out = System.out; + + // TODO reasoner initial KB from args + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + + return interpreter; + } + } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index ef7c49ef4..2555f3d50 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,75 +1,34 @@ package org.semanticweb.rulewerk.client.shell; -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.PrintStream; - -import org.jline.terminal.Terminal; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class Shell { - private final Terminal terminal; - private final Interpreter interpreter; + boolean running; - public Shell(final Terminal terminal) { - this.terminal = terminal; - this.interpreter = this.initializeInterpreter(); - } - - private Interpreter initializeInterpreter() { - // FIXME connect terminal writer -// final PrintStream out = this.terminal.writer().; - final PrintStream out = System.out; - - // TODO reasoner initial KB from args - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + public Shell(final Interpreter interpreter) { + this.interpreter = interpreter; + CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { - interpreter.registerCommandInterpreter(exitCommandName.toString(), new ExitCommandInterpreter()); + interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); } - - return 
interpreter; } public void run(final CommandReader commandReader) { - while (true) { + running = true; + while (running) { final Command command; try { command = commandReader.readCommand(); } catch (final Exception e) { - // TODO: handle exception + interpreter.getOut().println("Unexpected error: " + e.getMessage()); continue; } @@ -77,15 +36,15 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - // TODO: handle exception - continue; - } - - if (ExitCommandName.isExitCommand(command.getName())) { - break; + interpreter.getOut().println("Error: " + e.getMessage()); } } } + interpreter.getOut().println("Rulewerk shell is stopped. Bye."); + } + + public void exitShell() { + this.running = false; } // @Override diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 13eb81671..1b7dc4d4c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -22,6 +22,7 @@ import java.util.ArrayList; +import org.semanticweb.rulewerk.client.shell.Shell; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.core.model.api.Command; @@ -43,24 +44,27 @@ public static boolean isExitCommand(final String commandName) { return false; } } + + final Shell shell; + + public ExitCommandInterpreter(Shell shell) { + this.shell = shell; + } @Override public String getHelp(final String commandName) { - // TODO Auto-generated method stub - return null; + return "Usage: " + commandName + "."; } @Override public String getSynopsis() { - // TODO Auto-generated 
method stub - return null; + return "exit Rulewerk shell"; } @Override public void run(final Command command, final org.semanticweb.rulewerk.commands.Interpreter interpreter) throws CommandExecutionException { - // TODO Auto-generated method stub - interpreter.getOut().println("Quiting rulewerk."); + this.shell.exitShell(); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 404e98e6d..ddfc0b9c3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,5 +1,8 @@ package org.semanticweb.rulewerk.commands; +import java.io.ByteArrayInputStream; +import java.io.InputStream; + /*- * #%L * Rulewerk Core Components @@ -23,12 +26,18 @@ import java.io.PrintStream; import java.util.HashMap; import java.util.List; +import java.util.Map.Entry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; +import org.semanticweb.rulewerk.parser.javacc.ParseException; +import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; public class Interpreter { @@ -66,6 +75,31 @@ public void runCommand(Command command) throws CommandExecutionException { throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); } } + + public Command parseCommand(String commandString) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes()); 
+ final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + localParser.setParserConfiguration(parserConfiguration); + + // Copy prefixes from KB: + try { + localParser.getPrefixDeclarationRegistry().setBaseIri(reasoner.getKnowledgeBase().getBaseIri()); + for (Entry prefix : reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()) { + localParser.getPrefixDeclarationRegistry().setPrefixIri(prefix.getKey(), prefix.getValue()); + } + } catch (PrefixDeclarationException e) { // unlikely! + throw new RuntimeException(e); + } + + Command result; + try { + result = localParser.command(); + localParser.ensureEndOfInput(); + } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { + throw new ParsingException("Exception while parsing command.", e); + } + return result; + } public Reasoner getReasoner() { return reasoner; From d0fed2f1dc0dc3ddc7d4640b545699fbbd8c79c8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 11:10:36 +0200 Subject: [PATCH 0908/1255] properly set encoding for String-based parsing --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 3 ++- .../java/org/semanticweb/rulewerk/parser/RuleParser.java | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index ddfc0b9c3..3a86661a3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -24,6 +24,7 @@ */ import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; @@ -77,7 +78,7 @@ public void runCommand(Command command) throws CommandExecutionException { } public Command parseCommand(String commandString) throws ParsingException { - final 
InputStream inputStream = new ByteArrayInputStream(commandString.getBytes()); + final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); localParser.setParserConfiguration(parserConfiguration); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 124bb5381..12d186684 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -22,6 +22,7 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.List; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; @@ -169,8 +170,8 @@ T parse(final JavaCCParser parser) */ static T parseSyntaxFragment(final String input, SyntaxFragmentParser parserAction, final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - final JavaCCParser localParser = new JavaCCParser(inputStream, DEFAULT_STRING_ENCODING); + final InputStream inputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)); + final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); if (parserConfiguration != null) { localParser.setParserConfiguration(parserConfiguration); From 394581c06bcb3aa1023b68156d8d2ed8b2290d50 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 11:12:11 +0200 Subject: [PATCH 0909/1255] license header --- .../rulewerk/client/shell/Shell.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 2555f3d50..3075657ea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import org.semanticweb.rulewerk.commands.CommandExecutionException; From 44755a11bac2a2be0fc251bfb2e141bab3b7ff59 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:28:09 +0200 Subject: [PATCH 0910/1255] use PrintWriter instead of PrintStream --- .../rulewerk/client/shell/RulewerkApplication.java | 3 +-- .../semanticweb/rulewerk/commands/Interpreter.java | 12 ++++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index b01085223..50b15d594 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -50,13 +50,12 @@ public static void main(final String[] args) throws IOException { private static Interpreter initializeInterpreter(Terminal terminal) { // FIXME connect terminal writer // final PrintStream out = terminal.writer().; - final PrintStream out = System.out; // TODO reasoner initial KB from args final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); return interpreter; } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 3a86661a3..2de5ea02a 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -23,7 +23,7 @@ * #L% */ -import java.io.PrintStream; +import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; @@ -43,12 +43,12 @@ public class Interpreter { final Reasoner reasoner; - final PrintStream out; + final PrintWriter out; final ParserConfiguration parserConfiguration; final HashMap commandInterpreters = new HashMap<>(); - public Interpreter(Reasoner reasoner, PrintStream out, ParserConfiguration parserConfiguration) { + public Interpreter(Reasoner reasoner, PrintWriter out, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; this.out = out; this.parserConfiguration = parserConfiguration; @@ -76,7 +76,7 @@ public void runCommand(Command command) throws CommandExecutionException { throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); } } - + public Command parseCommand(String commandString) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); @@ -105,12 +105,12 @@ public Command parseCommand(String commandString) throws ParsingException { public Reasoner getReasoner() { return reasoner; } - + public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - public PrintStream getOut() { + public PrintWriter getOut() { return out; } From e5fc2a847208b1d295ae25d0ebff4d913d9fc031 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:29:56 +0200 Subject: [PATCH 0911/1255] remove unused import --- .../semanticweb/rulewerk/client/shell/RulewerkApplication.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index 50b15d594..186dad027 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -21,7 +21,6 @@ */ import java.io.IOException; -import java.io.PrintStream; import org.jline.reader.LineReader; import org.jline.terminal.Terminal; From 2a347a549497fd91bafe584eaa12b3015f90270e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:34:18 +0200 Subject: [PATCH 0912/1255] print summary of asserts --- .../rulewerk/commands/AssertCommandInterpreter.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 759f5a059..081c6222c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -32,6 +32,8 @@ public class AssertCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; for (Argument argument : command.getArguments()) { if (argument.fromPositiveLiteral().isPresent()) { PositiveLiteral literal = argument.fromPositiveLiteral().get(); @@ -42,13 +44,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); } interpreter.getReasoner().getKnowledgeBase().addStatement(fact); + factCount++; } else if (argument.fromRule().isPresent()) { 
interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); + ruleCount++; } else { throw new CommandExecutionException( "Only facts and rules can be asserted. Encountered " + argument.toString()); } } + + interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rules."); } @Override From e49bc0eabbcc0b654c60f6ca10c3dc6a4d9e28b0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:39:14 +0200 Subject: [PATCH 0913/1255] More robust exception handling in parsing --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 1 + .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 2 +- .../main/java/org/semanticweb/rulewerk/parser/RuleParser.java | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 3075657ea..8c5edf214 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -49,6 +49,7 @@ public void run(final CommandReader commandReader) { command = commandReader.readCommand(); } catch (final Exception e) { interpreter.getOut().println("Unexpected error: " + e.getMessage()); + e.printStackTrace(); continue; } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 2de5ea02a..b4d70b0aa 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -96,7 +96,7 @@ public Command parseCommand(String commandString) throws ParsingException { try { result = localParser.command(); localParser.ensureEndOfInput(); - } catch (ParseException | 
PrefixDeclarationException | TokenMgrError e) { + } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { throw new ParsingException("Exception while parsing command.", e); } return result; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 12d186684..146fa1085 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -181,7 +181,7 @@ static T parseSyntaxFragment(final String input, SyntaxFragme try { result = parserAction.parse(localParser); localParser.ensureEndOfInput(); - } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { + } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); } From 9a2e7072bb940dbe9fa40804089264d139856957 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 12:45:09 +0200 Subject: [PATCH 0914/1255] better error reporting --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index b4d70b0aa..46f6493c2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -97,7 +97,7 @@ public Command parseCommand(String commandString) throws ParsingException { result = localParser.command(); localParser.ensureEndOfInput(); } catch (ParseException | PrefixDeclarationException | 
TokenMgrError | RuntimeException e) { - throw new ParsingException("Exception while parsing command.", e); + throw new ParsingException("failed to parse command \"\"\"" + commandString + "\"\"\"", e); } return result; } From 8aff9426d7512e508b73d7e4b7db08f4c7b76a8a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:12:47 +0200 Subject: [PATCH 0915/1255] Remove unused import --- .../java/org/semanticweb/rulewerk/core/model/api/Argument.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java index dfbd0c771..e25136bd3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -23,8 +23,6 @@ import java.util.Optional; import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; - /** * A tagged union representing the possible types allowed to appear as arguments * in commands and parser directives. 
From 2a05cb664bcb86879f4750a0d1296cbfc7e7a374 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:45:15 +0200 Subject: [PATCH 0916/1255] write KB to Writer, not to OutputStream --- .../rulewerk/core/reasoner/KnowledgeBase.java | 33 ++-- .../implementation/QueryResultImpl.java | 151 ++++++++++++++++++ .../core/reasoner/KnowledgeBaseTest.java | 21 ++- 3 files changed, 180 insertions(+), 25 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 7cfe63c3a..5dc6e398c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -22,10 +22,12 @@ import java.io.File; import java.io.FileInputStream; -import java.io.FileOutputStream; +import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -571,27 +573,27 @@ public String unresolveAbsoluteIri(String iri) { /** * Serialise the KnowledgeBase to the {@link OutputStream}. * - * @param stream the {@link OutputStream} to serialise to. + * @param writer the {@link OutputStream} to serialise to. 
* * @throws IOException if an I/O error occurs while writing to given output * stream */ - public void writeKnowledgeBase(OutputStream stream) throws IOException { - stream.write(Serializer.getBaseAndPrefixDeclarations(this).getBytes()); + public void writeKnowledgeBase(Writer writer) throws IOException { + writer.write(Serializer.getBaseAndPrefixDeclarations(this)); for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { - stream.write(Serializer.getString(dataSource).getBytes()); - stream.write('\n'); + writer.write(Serializer.getString(dataSource)); + writer.write('\n'); } - for (Rule rule : this.getRules()) { - stream.write(Serializer.getString(rule).getBytes()); - stream.write('\n'); + for (Fact fact : this.getFacts()) { + writer.write(Serializer.getFactString(fact)); + writer.write('\n'); } - for (Fact fact : this.getFacts()) { - stream.write(Serializer.getFactString(fact).getBytes()); - stream.write('\n'); + for (Rule rule : this.getRules()) { + writer.write(Serializer.getString(rule)); + writer.write('\n'); } } @@ -601,10 +603,13 @@ public void writeKnowledgeBase(OutputStream stream) throws IOException { * @param filePath path to the file to serialise into. * * @throws IOException + * @deprecated Use {@link KnowledgeBase#writeKnowledgeBase(Writer)} instead. The + * method will disappear. 
*/ + @Deprecated public void writeKnowledgeBase(String filePath) throws IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - this.writeKnowledgeBase(stream); + try (FileWriter writer = new FileWriter(filePath, StandardCharsets.UTF_8)) { + this.writeKnowledgeBase(writer); } } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 7d8a06f24..5207b1087 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -1,5 +1,9 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; + /* * #%L * Rulewerk Core Components @@ -21,23 +25,170 @@ */ import java.util.List; +import java.util.ListIterator; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; /** * Implements {@link QueryResult}s. 
+ * * @author Irina Dragoste * */ public final class QueryResultImpl implements QueryResult { + static class ShallowTermList implements List { + + final Term[] data; + + public ShallowTermList(Term[] data) { + this.data = data; + } + + UnsupportedOperationException uoe() { return new UnsupportedOperationException(); } + + @Override public boolean add(Term e) { throw uoe(); } + @Override public boolean addAll(Collection c) { throw uoe(); } + @Override public void clear() { throw uoe(); } + @Override public boolean remove(Object o) { throw uoe(); } + @Override public boolean removeAll(Collection c) { throw uoe(); } + @Override public boolean retainAll(Collection c) { throw uoe(); } + @Override public void add(int index, Term element) { throw uoe(); } + @Override public boolean addAll(int index, Collection c) { throw uoe(); } + @Override public Term remove(int index) { throw uoe(); } + + @Override + public boolean contains(Object o) { + return indexOf(o) >= 0; + } + + @Override + public boolean containsAll(Collection arg0) { + // TODO Auto-generated method stub + return false; + } + + @Override + public Term get(int index) { + return data[index]; + } + + @Override + public int indexOf(Object o) { + for (int i = 0, s = size(); i < s; i++) { + if (get(i).equals(o)) { + return i; + } + } + return -1; + } + + @Override + public boolean isEmpty() { + return size() == 0; + } + + @Override + public Iterator iterator() { + // TODO Auto-generated method stub + return null; + } + + @Override + public int lastIndexOf(Object arg0) { + // TODO Auto-generated method stub + return 0; + } + + @Override + public ListIterator listIterator() { + // TODO Auto-generated method stub + return null; + } + + @Override + public ListIterator listIterator(int arg0) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Term set(int arg0, Term arg1) { + throw uoe(); + } + + @Override + public int size() { + return data.length; + } + + @Override + public List subList(int 
arg0, int arg1) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Object[] toArray() { + return Arrays.copyOf(data, data.length); + } + + @Override + @SuppressWarnings("unchecked") + public T[] toArray(T[] a) { + int size = data.length; + if (a.length < size) { + // Make a new array of a's runtime type, but my contents: + return (T[]) Arrays.copyOf(data, size, a.getClass()); + } + System.arraycopy(data, 0, a, 0, size); + if (a.length > size) { + a[size] = null; // null-terminate + } + return a; + } + + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + + if (!(o instanceof List)) { + return false; + } + + Iterator oit = ((List) o).iterator(); + for (int i = 0, s = size(); i < s; i++) { + if (!oit.hasNext() || !get(i).equals(oit.next())) { + return false; + } + } + return !oit.hasNext(); + } + + @Override + public int hashCode() { + int hash = 1; + for (int i = 0, s = size(); i < s; i++) { + hash = 31 * hash + get(i).hashCode(); + } + return hash; + } + } + private final List terms; public QueryResultImpl(List terms) { this.terms = terms; } + public static QueryResultImpl fromArray(Term[] terms) { + return new QueryResultImpl(new ShallowTermList(terms)); + } + @Override public List getTerms() { return this.terms; diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index d989d1a9f..91a350b65 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -22,9 +22,8 @@ import static org.junit.Assert.*; -import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.OutputStream; +import java.io.StringWriter; import java.net.URL; import java.util.Arrays; @@ -133,9 +132,9 @@ public void 
mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationExc @Test public void writeKnowledgeBase_justFacts_succeeds() throws IOException { - OutputStream stream = new ByteArrayOutputStream(); - this.kb.writeKnowledgeBase(stream); - assertEquals("P(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + StringWriter writer = new StringWriter(); + this.kb.writeKnowledgeBase(writer); + assertEquals("P(c) .\nP(d) .\nQ(c) .\n", writer.toString()); } @Test @@ -144,9 +143,9 @@ public void writeKnowledgeBase_withBase_succeeds() throws IOException { MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(baseIri); this.kb.mergePrefixDeclarations(prefixDeclarations); - OutputStream stream = new ByteArrayOutputStream(); - this.kb.writeKnowledgeBase(stream); - assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + StringWriter writer = new StringWriter(); + this.kb.writeKnowledgeBase(writer); + assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", writer.toString()); } @Test @@ -157,9 +156,9 @@ public void writeKnowledgeBase_alsoRuleAndDataSource_succeeds() throws IOExcepti this.kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("S", 1), new SparqlQueryResultDataSource(new URL(sparqlIri), "?X", sparqlBgp))); - OutputStream stream = new ByteArrayOutputStream(); - this.kb.writeKnowledgeBase(stream); + StringWriter writer = new StringWriter(); + this.kb.writeKnowledgeBase(writer); assertEquals("@source S[1]: sparql(<" + sparqlIri + ">, \"?X\", \"" + sparqlBgp - + "\") .\nP(?X) :- Q(?X) .\nP(c) .\nP(d) .\nQ(c) .\n", stream.toString()); + + "\") .\nP(c) .\nP(d) .\nQ(c) .\nP(?X) :- Q(?X) .\n", writer.toString()); } } From dc7a85284528ba13a63dfded8da7091d3023d777 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:54:03 +0200 Subject: [PATCH 0917/1255] improed format for command-specific help --- 
.../semanticweb/rulewerk/commands/HelpCommandInterpreter.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index a94b0fa30..8488b42f9 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -37,6 +37,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); if (interpreter.commandInterpreters.containsKey(helpCommand)) { + interpreter.getOut().println( + "@" + helpCommand + ": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); interpreter.getOut().println(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); } else { interpreter.getOut().println("Command '" + helpCommand + "' not known."); From a3b03f0d9ccf6d36e263e62d7915030803d0d5c6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 13:54:25 +0200 Subject: [PATCH 0918/1255] new command to show KB contents --- .../rulewerk/commands/Interpreter.java | 1 + .../commands/ShowKbCommandInterpreter.java | 29 +++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 46f6493c2..c057d96ef 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -122,6 +122,7 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("load", new LoadCommandInterpreter()); registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); registerCommandInterpreter("setsource", new SetSourceCommandInterpreter()); + registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } /** diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java new file mode 100644 index 000000000..2efb728b9 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -0,0 +1,29 @@ +package org.semanticweb.rulewerk.commands; + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Command; + +public class ShowKbCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 0); + try { + interpreter.getReasoner().getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + "."; + } + + @Override + public String getSynopsis() { + return "displays the content of the knowledge base"; + } + +} From ba989e4d9ce05999668b2d5908f4618b444202da Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:10:47 +0200 Subject: [PATCH 0919/1255] rename setsource to addsource --- ...Interpreter.java => AddSourceCommandInterpreter.java} | 9 ++++++--- .../org/semanticweb/rulewerk/commands/Interpreter.java | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) rename 
rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/{SetSourceCommandInterpreter.java => AddSourceCommandInterpreter.java} (89%) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java similarity index 89% rename from rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java rename to rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index 416084b38..c80c63f3f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -28,7 +28,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.parser.ParsingException; -public class SetSourceCommandInterpreter implements CommandInterpreter { +public class AddSourceCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { @@ -73,12 +73,15 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: ."; + return "Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " : a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources."; } @Override public String getSynopsis() { - return "define an external data source for a predicate"; + return "define a new external data source for a predicate"; } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java 
index c057d96ef..156e2d24f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -121,7 +121,7 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("reason", new ReasonCommandInterpreter()); registerCommandInterpreter("load", new LoadCommandInterpreter()); registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); - registerCommandInterpreter("setsource", new SetSourceCommandInterpreter()); + registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } From ce27b691910ea9457b32559b29d2d2330e1c6ce7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:10:53 +0200 Subject: [PATCH 0920/1255] license header --- .../commands/ShowKbCommandInterpreter.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index 2efb728b9..86ea91d12 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.commands; +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.IOException; import org.semanticweb.rulewerk.core.model.api.Command; From c144422c1f2364ea9c19bcd10a8e70938c528101 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:11:01 +0200 Subject: [PATCH 0921/1255] more informative help --- .../semanticweb/rulewerk/commands/HelpCommandInterpreter.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 8488b42f9..94b687195 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -33,6 +33,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getOut().println( " @" + commandName + ": " + interpreter.commandInterpreters.get(commandName).getSynopsis()); } + interpreter.getOut().println(); + interpreter.getOut() + .println("For more information on any command, use @" + command.getName() + " [command name]."); } else if (command.getArguments().size() == 1 && command.getArguments().get(0).fromTerm().isPresent() && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); From 08a0b0e1b7a4f54310714bb3890f38f113f9969c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 
Aug 2020 14:39:34 +0200 Subject: [PATCH 0922/1255] convenience method to access KB --- .../rulewerk/commands/AssertCommandInterpreter.java | 6 +++--- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 5 +++++ .../rulewerk/commands/LoadCommandInterpreter.java | 2 +- .../rulewerk/commands/SetPrefixCommandInterpreter.java | 5 ++--- .../rulewerk/commands/ShowKbCommandInterpreter.java | 2 +- 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 081c6222c..b8f02719b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -43,17 +43,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } catch (IllegalArgumentException e) { throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); } - interpreter.getReasoner().getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(fact); factCount++; } else if (argument.fromRule().isPresent()) { - interpreter.getReasoner().getKnowledgeBase().addStatement(argument.fromRule().get()); + interpreter.getKnowledgeBase().addStatement(argument.fromRule().get()); ruleCount++; } else { throw new CommandExecutionException( "Only facts and rules can be asserted. 
Encountered " + argument.toString()); } } - + interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rules."); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 156e2d24f..0a13f02e3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -33,6 +33,7 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -106,6 +107,10 @@ public Reasoner getReasoner() { return reasoner; } + public KnowledgeBase getKnowledgeBase() { + return reasoner.getKnowledgeBase(); + } + public ParserConfiguration getParserConfiguration() { return parserConfiguration; } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index dde5e9d18..e1dda80e5 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -43,7 +43,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } try { FileInputStream fileInputStream = new FileInputStream(fileName); - RuleParser.parseInto(interpreter.getReasoner().getKnowledgeBase(), fileInputStream); + RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); } catch 
(FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java index 7b9427d02..ed31fb400 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -31,10 +31,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); - interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); try { - interpreter.getReasoner().getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, - prefixIri); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, prefixIri); } catch (PrefixDeclarationException e) { // practically impossible throw new CommandExecutionException("Setting prefix failed: " + e.getMessage()); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index 86ea91d12..2927d4e34 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -30,7 +30,7 @@ public class ShowKbCommandInterpreter implements CommandInterpreter { public void run(Command command, Interpreter interpreter) throws 
CommandExecutionException { Interpreter.validateArgumentCount(command, 0); try { - interpreter.getReasoner().getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); + interpreter.getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } From 5e480211cd61992beabbea9592f3c997addcc507 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:43:00 +0200 Subject: [PATCH 0923/1255] improved message --- .../semanticweb/rulewerk/commands/AssertCommandInterpreter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index b8f02719b..c83b2ae46 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -54,7 +54,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rules."); + interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); } @Override From 952708889ae60c6df6efeefca7ee8ee2eb6d2505 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:43:26 +0200 Subject: [PATCH 0924/1255] flush message before starting to reason --- .../semanticweb/rulewerk/commands/ReasonCommandInterpreter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 6ecfd8944..07d600d71 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -35,6 +35,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } interpreter.getOut().println("Loading and materializing inferences ..."); + interpreter.getOut().flush(); Timer timer = new Timer("reasoning"); timer.start(); From e84695dc890b9e0a3e26c2b75e8120c447497059 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 14:44:39 +0200 Subject: [PATCH 0925/1255] flush goodbye before exiting --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 1 + 1 file changed, 1 insertion(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 8c5edf214..4c1e9daec 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -62,6 +62,7 @@ public void run(final CommandReader commandReader) { } } interpreter.getOut().println("Rulewerk shell is stopped. 
Bye."); + interpreter.getOut().flush(); } public void exitShell() { From 5de1b7326c2585f4b6c5558169096ae1a4172938 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:08:15 +0200 Subject: [PATCH 0926/1255] support parsing of partial source declarations --- .../semanticweb/rulewerk/parser/javacc/JavaCCParser.jj | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index e73b4b222..3dbff82a7 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -39,6 +39,7 @@ import java.net.URISyntaxException; import java.util.List; import java.util.ArrayList; +import java.util.Arrays; import java.util.LinkedList; import java.util.ArrayDeque; import java.util.Deque; @@ -166,10 +167,14 @@ Command command() throws PrefixDeclarationException : { } { name = < CUSTOM_DIRECTIVE > - ( LOOKAHEAD(predicateName() < ARITY >) predicateName = predicateName() arity = < ARITY > < COLON > arguments = Arguments() < DOT > { + ( LOOKAHEAD(predicateName() < ARITY > ) predicateName = predicateName() arity = < ARITY > < DOT > { + Argument argument = Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING)); + return new Command(name.image, Arrays.asList(argument)); + } + | LOOKAHEAD(predicateName() < ARITY >) predicateName = predicateName() arity = < ARITY > < COLON > arguments = Arguments() < DOT > { arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING))); return new Command(name.image,arguments); - } + } | arguments = Arguments() < DOT > { return new Command(name.image,arguments); } | pn = < PNAME_NS > 
arguments = Arguments() < DOT > { arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(pn.image, PrefixDeclarationRegistry.XSD_STRING))); From 4d690941413795f362e73e3fcb5992b7a619f559 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:10:23 +0200 Subject: [PATCH 0927/1255] support removal of sources --- .../commands/AddSourceCommandInterpreter.java | 59 +++++++------ .../rulewerk/commands/Interpreter.java | 1 + .../RemoveSourceCommandInterpreter.java | 82 +++++++++++++++++++ 3 files changed, 116 insertions(+), 26 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index c80c63f3f..f0182e3dd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -37,6 +37,34 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, "source declaration"); + Predicate predicate = extractPredicate(predicateDeclaration); + DataSource dataSource = extractDataSource(sourceDeclaration, interpreter); + + if (dataSource.getRequiredArity().isPresent()) { + Integer requiredArity = dataSource.getRequiredArity().get(); + if (predicate.getArity() != requiredArity) { + throw new CommandExecutionException("Invalid arity " + predicate.getArity() + " for data source, " + + "expected " + requiredArity + "."); + } + } + + interpreter.getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + 
commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " : a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources."; + } + + @Override + public String getSynopsis() { + return "define a new external data source for a predicate"; + } + + static Predicate extractPredicate(String predicateDeclaration) throws CommandExecutionException { String predicateName; int arity; try { @@ -50,38 +78,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio "Predicate declaration must have the format \"predicateName[number]\" but was " + predicateDeclaration); } - Predicate predicate = Expressions.makePredicate(predicateName, arity); + return Expressions.makePredicate(predicateName, arity); + } - DataSource dataSource; + static DataSource extractDataSource(PositiveLiteral sourceDeclaration, Interpreter interpreter) + throws CommandExecutionException { try { - dataSource = interpreter.getParserConfiguration() + return interpreter.getParserConfiguration() .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); } catch (ParsingException e) { throw new CommandExecutionException("Could not parse source declartion: " + e.getMessage()); } - - if (dataSource.getRequiredArity().isPresent()) { - Integer requiredArity = dataSource.getRequiredArity().get(); - if (arity != requiredArity) { - throw new CommandExecutionException( - "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); - } - } - - interpreter.getReasoner().getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); - } - - @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: .\n" - + " [] : the name of the predicate and its arity\n" - + " : a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources."; - } - - @Override - public String getSynopsis() { - return "define a 
new external data source for a predicate"; } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 0a13f02e3..99ff2dac3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -127,6 +127,7 @@ private void registerDefaultCommandInterpreters() { registerCommandInterpreter("load", new LoadCommandInterpreter()); registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); + registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java new file mode 100644 index 000000000..1db1b96ff --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -0,0 +1,82 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; + +public class RemoveSourceCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 0 || command.getArguments().size() > 2) { + throw new CommandExecutionException("This command requires one or two arguments."); + } + + String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + DataSource dataSource = null; + if (command.getArguments().size() == 2) { + PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + "source declaration"); + dataSource = AddSourceCommandInterpreter.extractDataSource(sourceDeclaration, interpreter); + } + + if (dataSource != null) { + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + if (interpreter.getKnowledgeBase().getStatements().contains(dataSourceDeclaration)) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + interpreter.getOut().println("Removed specified data source declaration."); + } else { + interpreter.getOut().println("Specified data source declaration not found in knowledge base."); + } + } else { + int count = 0; + for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + .getDataSourceDeclarations()) { + 
if (dataSourceDeclaration.getPredicate().equals(predicate)) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + count++; + } + } + interpreter.getOut().println("Removed " + count + " matching data source declaration(s)."); + } + + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " (optional): a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources."; + } + + @Override + public String getSynopsis() { + return "remove one or all external data sources for a predicate"; + } + +} From 05a427857d307bcebd8460a85f098130937f1c67 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:34:27 +0200 Subject: [PATCH 0928/1255] let KnowledgeBase report on removed statements --- .../RemoveSourceCommandInterpreter.java | 3 +-- .../rulewerk/core/reasoner/KnowledgeBase.java | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 1db1b96ff..8577d9e7a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -46,8 +46,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (dataSource != null) { DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); - if (interpreter.getKnowledgeBase().getStatements().contains(dataSourceDeclaration)) { - interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { 
interpreter.getOut().println("Removed specified data source declaration."); } else { interpreter.getOut().println("Specified data source declaration not found in knowledge base."); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 5dc6e398c..40cb247ef 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -273,13 +273,18 @@ public void addStatements(final Statement... statements) { } /** - * Removes a single statement from the knowledge base. + * Removes a single statement from the knowledge base, and returns the number of + * statements that were actually removed (0 or 1). * * @param statement the statement to remove + * @return number of removed statements */ - public void removeStatement(final Statement statement) { + public int removeStatement(final Statement statement) { if (this.doRemoveStatement(statement)) { this.notifyListenersOnStatementRemoved(statement); + return 1; + } else { + return 0; } } @@ -303,8 +308,9 @@ boolean doRemoveStatement(final Statement statement) { * Removes a collection of statements to the knowledge base. * * @param statements the statements to remove + * @return number of removed statements */ - public void removeStatements(final Collection statements) { + public int removeStatements(final Collection statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { @@ -314,14 +320,16 @@ public void removeStatements(final Collection statements) { } this.notifyListenersOnStatementsRemoved(removedStatements); + return removedStatements.size(); } /** * Removes a list of statements from the knowledge base. 
* * @param statements the statements to remove + * @return number of removed statements */ - public void removeStatements(final Statement... statements) { + public int removeStatements(final Statement... statements) { final List removedStatements = new ArrayList<>(); for (final Statement statement : statements) { @@ -331,6 +339,7 @@ public void removeStatements(final Statement... statements) { } this.notifyListenersOnStatementsRemoved(removedStatements); + return removedStatements.size(); } private void notifyListenersOnStatementAdded(final Statement addedStatement) { From b40db91150b41be152fc5e2c3b353eb48692e8ee Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:34:58 +0200 Subject: [PATCH 0929/1255] support retraction of statements --- .../rulewerk/commands/Interpreter.java | 9 +-- .../commands/RetractCommandInterpreter.java | 68 +++++++++++++++++++ 2 files changed, 73 insertions(+), 4 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 99ff2dac3..1dd005700 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -121,13 +121,14 @@ public PrintWriter getOut() { private void registerDefaultCommandInterpreters() { registerCommandInterpreter("help", new HelpCommandInterpreter()); - registerCommandInterpreter("assert", new AssertCommandInterpreter()); - registerCommandInterpreter("query", new QueryCommandInterpreter()); - registerCommandInterpreter("reason", new ReasonCommandInterpreter()); registerCommandInterpreter("load", new LoadCommandInterpreter()); - registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + 
registerCommandInterpreter("assert", new AssertCommandInterpreter()); + registerCommandInterpreter("retract", new RetractCommandInterpreter()); registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); + registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + registerCommandInterpreter("reason", new ReasonCommandInterpreter()); + registerCommandInterpreter("query", new QueryCommandInterpreter()); registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java new file mode 100644 index 000000000..88ef8b6a2 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -0,0 +1,68 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class RetractCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } else if (argument.fromRule().isPresent()) { + ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); + } else { + throw new CommandExecutionException( + "Only facts and rules can be retracted. 
Encountered " + argument.toString()); + } + } + + interpreter.getOut().println("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be removed from the knowledge base\n" + + "Reasoning needs to be invoked after finishing the removal of statements."; + } + + @Override + public String getSynopsis() { + return "remove facts and rules to the knowledge base"; + } + +} From d01c6edd60af49185083536b848266f5f0bdb6c7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 15:35:09 +0200 Subject: [PATCH 0930/1255] formatting --- .../semanticweb/rulewerk/commands/AssertCommandInterpreter.java | 1 - 1 file changed, 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index c83b2ae46..ce2df6346 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -31,7 +31,6 @@ public class AssertCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - int factCount = 0; int ruleCount = 0; for (Argument argument : command.getArguments()) { From 0efce81f3f1cbacd05b903fc415461be29f1c31c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 15:51:17 +0200 Subject: [PATCH 0931/1255] added dummy Main class for main command with shell and materialize subcommands --- rulewerk-client/pom.xml | 2 +- .../rulewerk/client/picocli/Main.java | 62 ++++++++++++++++ .../client/shell/InteractiveShell.java | 72 +++++++++++++++++++ .../rulewerk/client/shell/Shell.java | 20 ++++++ 4 files changed, 155 
insertions(+), 1 deletion(-) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 98bca4d59..760f4d472 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -94,7 +94,7 @@ - org.semanticweb.rulewerk.client.picocli.RulewerkClient + org.semanticweb.rulewerk.client.picocli.Main diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java new file mode 100644 index 000000000..3a2ac85bc --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -0,0 +1,62 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.client.shell.InteractiveShell; + +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Dummy class with main method that is a command with subcommands shell and + * materialize + * + * @author Irina Dragoste + * + */ +@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, + RulewerkClientMaterialize.class }) +public class Main { + + public static void main(final String[] args) throws IOException { + if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { + final InteractiveShell interactiveShell = new InteractiveShell(); + interactiveShell.run(); + } else { + if (args[0].equals("materialize")) { + final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); + commandline.execute(args); + } else { + if (!args[0].equals("help")) { + System.out.println("Invalid command."); + } + // TODO improve help + // TODO do we need to create a Help command? + (new CommandLine(new Main())).usage(System.out); + + } + } + + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java new file mode 100644 index 000000000..c39f0bb1b --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -0,0 +1,72 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.io.PrintStream; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +import picocli.CommandLine.Command; + +@Command(name = "shell", description = "An interactive shell for Rulewerk. 
The default command.") +public class InteractiveShell +//implements Runnable +{ + +// @Override + public void run() throws IOException { + + final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Interpreter interpreter = initializeInterpreter(terminal); + + final Shell shell = new Shell(interpreter); + + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); + + shell.run(commandReader); + + } + + static Interpreter initializeInterpreter(final Terminal terminal) { + // FIXME connect terminal writer +// final PrintStream out = terminal.writer().; + final PrintStream out = System.out; + + // TODO reasoner initial KB from args + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); + + return interpreter; + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 2555f3d50..3075657ea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import org.semanticweb.rulewerk.commands.CommandExecutionException; From a6149e6d3aec7badc88fd64f31500a33d2584e51 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 15:58:13 +0200 Subject: [PATCH 0932/1255] fix compile error --- .../client/shell/InteractiveShell.java | 138 +++++++++--------- 1 file changed, 66 insertions(+), 72 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index c39f0bb1b..154861f30 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -1,72 +1,66 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; -import java.io.PrintStream; - -import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; - -import picocli.CommandLine.Command; - -@Command(name = "shell", description = "An interactive shell for Rulewerk. 
The default command.") -public class InteractiveShell -//implements Runnable -{ - -// @Override - public void run() throws IOException { - - final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = initializeInterpreter(terminal); - - final Shell shell = new Shell(interpreter); - - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); - final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); - - shell.run(commandReader); - - } - - static Interpreter initializeInterpreter(final Terminal terminal) { - // FIXME connect terminal writer -// final PrintStream out = terminal.writer().; - final PrintStream out = System.out; - - // TODO reasoner initial KB from args - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, out, parserConfiguration); - - return interpreter; - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +import picocli.CommandLine.Command; + +@Command(name = "shell", description = "An interactive shell for Rulewerk. The default command.") +public class InteractiveShell +//implements Runnable +{ + +// @Override + public void run() throws IOException { + + final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Interpreter interpreter = initializeInterpreter(terminal); + + final Shell shell = new Shell(interpreter); + + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); + + shell.run(commandReader); + + } + + static Interpreter initializeInterpreter(final Terminal terminal) { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = new VLogReasoner(knowledgeBase); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); + + return interpreter; + } + +} From 8f6a3b9ec5dda72c308cbfe683a3533fd341993c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 16:20:05 +0200 Subject: [PATCH 0933/1255] avoid Java 11 feature --- .../semanticweb/rulewerk/core/reasoner/KnowledgeBase.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 40cb247ef..b9f1d3ee9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -22,10 +22,11 @@ import java.io.File; import java.io.FileInputStream; -import java.io.FileWriter; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -617,7 +618,7 @@ public void writeKnowledgeBase(Writer writer) throws IOException { */ @Deprecated public void writeKnowledgeBase(String filePath) throws IOException { - try (FileWriter writer = new FileWriter(filePath, StandardCharsets.UTF_8)) { + try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) { this.writeKnowledgeBase(writer); } } From 10034af3e3d4dd62a2a9523c57bb5256c44982f6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 16:42:59 +0200 Subject: [PATCH 0934/1255] improve @help format --- .../rulewerk/commands/HelpCommandInterpreter.java | 12 +++++++++--- .../semanticweb/rulewerk/commands/Interpreter.java | 4 ++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 94b687195..33309e837 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -28,11 +28,17 @@ public class HelpCommandInterpreter implements 
CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { if (command.getArguments().size() == 0) { - interpreter.getOut().println("Available commands:"); + int maxLength = 0; for (String commandName : interpreter.commandInterpreters.keySet()) { - interpreter.getOut().println( - " @" + commandName + ": " + interpreter.commandInterpreters.get(commandName).getSynopsis()); + maxLength = (commandName.length() > maxLength) ? commandName.length() : maxLength; } + final int padLength = maxLength + 1; + + interpreter.getOut().println("Available commands:"); + interpreter.commandInterpreters.forEach((commandName, commandForName) -> { + interpreter.getOut().println(" @" + String.format("%1$-" + padLength + "s", commandName) + ": " + + commandForName.getSynopsis()); + }); interpreter.getOut().println(); interpreter.getOut() .println("For more information on any command, use @" + command.getName() + " [command name]."); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 1dd005700..402ad8e29 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -25,7 +25,7 @@ import java.io.PrintWriter; import java.nio.charset.StandardCharsets; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map.Entry; @@ -47,7 +47,7 @@ public class Interpreter { final PrintWriter out; final ParserConfiguration parserConfiguration; - final HashMap commandInterpreters = new HashMap<>(); + final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); public Interpreter(Reasoner reasoner, PrintWriter out, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; From bb1e429d6140afc3c191591d422ee272105568ae Mon Sep 17 
00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 16:43:09 +0200 Subject: [PATCH 0935/1255] show welcome message --- .../java/org/semanticweb/rulewerk/client/shell/Shell.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 4c1e9daec..d817e21ee 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -42,6 +42,8 @@ public Shell(final Interpreter interpreter) { } public void run(final CommandReader commandReader) { + printWelcome(); + running = true; while (running) { final Command command; @@ -68,6 +70,11 @@ public void run(final CommandReader commandReader) { public void exitShell() { this.running = false; } + + private void printWelcome() { + interpreter.getOut().println("Welcome to the Rulewerk interactive shell."); + interpreter.getOut().println("For further information, type @help."); + } // @Override // public void handleResult(final Object result) { From 63862dcd34c5b89109a3eb5d634ffac9804e4fca Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 17:02:16 +0200 Subject: [PATCH 0936/1255] remove deprecated class RulewerkApplication. 
InteractiveShell can be sued --- .../rulewerk/client/picocli/Main.java | 123 +++++++++--------- .../client/shell/InteractiveShell.java | 6 +- .../client/shell/RulewerkApplication.java | 62 --------- 3 files changed, 66 insertions(+), 125 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 3a2ac85bc..e75ef7eb0 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -1,62 +1,61 @@ -package org.semanticweb.rulewerk.client.picocli; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.IOException; - -import org.semanticweb.rulewerk.client.shell.InteractiveShell; - -import picocli.CommandLine; -import picocli.CommandLine.Command; - -/** - * Dummy class with main method that is a command with subcommands shell and - * materialize - * - * @author Irina Dragoste - * - */ -@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, - RulewerkClientMaterialize.class }) -public class Main { - - public static void main(final String[] args) throws IOException { - if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { - final InteractiveShell interactiveShell = new InteractiveShell(); - interactiveShell.run(); - } else { - if (args[0].equals("materialize")) { - final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); - commandline.execute(args); - } else { - if (!args[0].equals("help")) { - System.out.println("Invalid command."); - } - // TODO improve help - // TODO do we need to create a Help command? - (new CommandLine(new Main())).usage(System.out); - - } - } - - } - -} +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.client.shell.InteractiveShell; + +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Dummy class with main method that is a command with subcommands shell and + * materialize + * + * @author Irina Dragoste + * + */ +@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, + RulewerkClientMaterialize.class }) +public class Main { + + public static void main(final String[] args) throws IOException { + if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { + InteractiveShell.run(); + } else { + if (args[0].equals("materialize")) { + final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); + commandline.execute(args); + } else { + if (!args[0].equals("help")) { + System.out.println("Invalid command."); + } + // TODO improve help + // TODO do we need to create a Help command? + (new CommandLine(new Main())).usage(System.out); + + } + } + + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 154861f30..63563a837 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -38,8 +38,12 @@ public class InteractiveShell //implements Runnable { + public static void main(final String[] args) throws IOException { + run(); + } + // @Override - public void run() throws IOException { + public static void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); final Interpreter interpreter = initializeInterpreter(terminal); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java deleted file mode 100644 index 186dad027..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; - -import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; - -public class RulewerkApplication { - - public static void main(final String[] args) throws IOException { - final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = initializeInterpreter(terminal); - - final Shell shell = new Shell(interpreter); - - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); - final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, 
promptProvider, interpreter); - - shell.run(commandReader); - } - - private static Interpreter initializeInterpreter(Terminal terminal) { - // FIXME connect terminal writer -// final PrintStream out = terminal.writer().; - - // TODO reasoner initial KB from args - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final Reasoner reasoner = new VLogReasoner(knowledgeBase); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); - - return interpreter; - } - -} From e99165907652e6d813efe8f459217fa4db6867a9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 17:08:48 +0200 Subject: [PATCH 0937/1255] removed unused dependency --- rulewerk-client/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 760f4d472..f05da9f94 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -68,11 +68,11 @@ jansi ${jansi.version} - + From bc9be81af4350a77e3babc17537acd356cab14c9 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 17:45:10 +0200 Subject: [PATCH 0938/1255] support pretty printing --- .../client/shell/InteractiveShell.java | 3 +- .../client/shell/RulewerkApplication.java | 2 +- .../rulewerk/client/shell/Shell.java | 22 +++--- .../client/shell/TerminalStyledPrinter.java | 72 +++++++++++++++++++ .../commands/ExitCommandInterpreter.java | 2 +- .../commands/AssertCommandInterpreter.java | 2 +- .../commands/HelpCommandInterpreter.java | 21 +++--- .../rulewerk/commands/Interpreter.java | 32 +++++++-- .../commands/QueryCommandInterpreter.java | 4 +- .../commands/ReasonCommandInterpreter.java | 6 +- .../RemoveSourceCommandInterpreter.java | 6 +- .../commands/RetractCommandInterpreter.java | 2 +- .../commands/ShowKbCommandInterpreter.java | 2 +- .../commands/SimpleStyledPrinter.java | 70 ++++++++++++++++++ 
.../rulewerk/commands/StyledPrinter.java | 39 ++++++++++ 15 files changed, 246 insertions(+), 39 deletions(-) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 154861f30..283be81e8 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -58,7 +58,8 @@ static Interpreter initializeInterpreter(final Terminal terminal) { final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); + final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), + parserConfiguration); return interpreter; } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java index 186dad027..5df9d5743 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/RulewerkApplication.java @@ -54,7 +54,7 @@ private static Interpreter initializeInterpreter(Terminal terminal) { final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new 
VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, terminal.writer(), parserConfiguration); + final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), parserConfiguration); return interpreter; } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index d817e21ee..9a2cd08d0 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -30,6 +30,7 @@ public class Shell { private final Interpreter interpreter; + boolean running; public Shell(final Interpreter interpreter) { @@ -43,14 +44,14 @@ public Shell(final Interpreter interpreter) { public void run(final CommandReader commandReader) { printWelcome(); - + running = true; while (running) { final Command command; try { command = commandReader.readCommand(); } catch (final Exception e) { - interpreter.getOut().println("Unexpected error: " + e.getMessage()); + interpreter.getWriter().println("Unexpected error: " + e.getMessage()); e.printStackTrace(); continue; } @@ -59,21 +60,26 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - interpreter.getOut().println("Error: " + e.getMessage()); + interpreter.getWriter().println("Error: " + e.getMessage()); } } } - interpreter.getOut().println("Rulewerk shell is stopped. Bye."); - interpreter.getOut().flush(); + interpreter.printSection("Existing Rulewerk shell ... 
bye.\n\n"); + interpreter.getWriter().flush(); } public void exitShell() { this.running = false; } - + private void printWelcome() { - interpreter.getOut().println("Welcome to the Rulewerk interactive shell."); - interpreter.getOut().println("For further information, type @help."); + interpreter.printNormal("\n"); + interpreter.printSection("Welcome to the Rulewerk interactive shell.\n"); + interpreter.printNormal("For further information, type "); + interpreter.printCode("@help."); + interpreter.printNormal(" To quit, type "); + interpreter.printCode("@exit.\n"); + interpreter.printNormal("\n"); } // @Override diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java new file mode 100644 index 000000000..8e77422f5 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java @@ -0,0 +1,72 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.StyledPrinter; + +public class TerminalStyledPrinter implements StyledPrinter { + + final Terminal terminal; + + public TerminalStyledPrinter(final Terminal terminal) { + this.terminal = terminal; + } + + @Override + public void printNormal(String string) { + printStyled(string, AttributedStyle.DEFAULT); + } + + @Override + public void printSection(String string) { + printStyled(string, AttributedStyle.DEFAULT.bold()); + } + + @Override + public void printEmph(String string) { + printStyled(string, AttributedStyle.DEFAULT.bold()); + } + + @Override + public void printCode(String string) { + printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + } + + @Override + public void printImportant(String string) { + printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + } + + @Override + public PrintWriter getWriter() { + return terminal.writer(); + } + + private void printStyled(String string, AttributedStyle attributedStyle) { + AttributedString attributedString = new AttributedString(string, attributedStyle); + getWriter().print(attributedString.toAnsi(terminal)); + } +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 1b7dc4d4c..5c2ac4c83 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -33,7 +33,7 @@ public class ExitCommandInterpreter implements CommandInterpreter { public static enum ExitCommandName { - exit, quit; + exit; public static boolean 
isExitCommand(final String commandName) { for(final ExitCommandName name: values()) { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index ce2df6346..99e1c90e7 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getOut().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.getWriter().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 33309e837..a06abf369 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -34,26 +34,25 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } final int padLength = maxLength + 1; - interpreter.getOut().println("Available commands:"); + interpreter.printSection("Available commands:\n"); interpreter.commandInterpreters.forEach((commandName, commandForName) -> { - interpreter.getOut().println(" @" + String.format("%1$-" + padLength + "s", commandName) + ": " - + commandForName.getSynopsis()); + interpreter.printCode(" @" + String.format("%1$-" + padLength + "s", commandName)); + interpreter.printNormal(": " + commandForName.getSynopsis() + "\n"); }); - interpreter.getOut().println(); - interpreter.getOut() - .println("For more information on 
any command, use @" + command.getName() + " [command name]."); + interpreter.printNormal("\nFor more information on any command, use "); + interpreter.printCode("@" + command.getName() + " [command name].\n"); } else if (command.getArguments().size() == 1 && command.getArguments().get(0).fromTerm().isPresent() && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); if (interpreter.commandInterpreters.containsKey(helpCommand)) { - interpreter.getOut().println( - "@" + helpCommand + ": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); - interpreter.getOut().println(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); + interpreter.printCode("@" + helpCommand); + interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); + interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); } else { - interpreter.getOut().println("Command '" + helpCommand + "' not known."); + interpreter.printNormal("Command '" + helpCommand + "' not known."); } } else { - interpreter.getOut().println(getHelp(command.getName())); + interpreter.printNormal(getHelp(command.getName())); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 402ad8e29..56843bad5 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -2,6 +2,7 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.io.PrintWriter; /*- * #%L @@ -23,7 +24,6 @@ * #L% */ -import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.LinkedHashMap; import java.util.List; @@ -44,14 +44,14 @@ public 
class Interpreter { final Reasoner reasoner; - final PrintWriter out; + final StyledPrinter printer; final ParserConfiguration parserConfiguration; final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - public Interpreter(Reasoner reasoner, PrintWriter out, ParserConfiguration parserConfiguration) { + public Interpreter(Reasoner reasoner, StyledPrinter printer, ParserConfiguration parserConfiguration) { this.reasoner = reasoner; - this.out = out; + this.printer = printer; this.parserConfiguration = parserConfiguration; registerDefaultCommandInterpreters(); } @@ -115,8 +115,28 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - public PrintWriter getOut() { - return out; + public PrintWriter getWriter() { + return printer.getWriter(); + } + + public void printNormal(String string) { + printer.printNormal(string); + } + + public void printSection(String string) { + printer.printSection(string); + } + + public void printEmph(String string) { + printer.printEmph(string); + } + + public void printCode(String string) { + printer.printCode(string); + } + + public void printImportant(String string) { + printer.printImportant(string); } private void registerDefaultCommandInterpreters() { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index bc8f69056..5bead8d3f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -64,11 +64,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { int count = 0; while (count != limit && answers.hasNext()) { - interpreter.getOut().println(" " + 
answers.next()); + interpreter.getWriter().println(" " + answers.next()); count++; } timer.stop(); - interpreter.getOut().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + interpreter.getWriter().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms. Results are " + answers.getCorrectness() + "."); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 07d600d71..fe0b9f580 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -34,8 +34,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("This command supports no arguments."); } - interpreter.getOut().println("Loading and materializing inferences ..."); - interpreter.getOut().flush(); + interpreter.getWriter().println("Loading and materializing inferences ..."); + interpreter.getWriter().flush(); Timer timer = new Timer("reasoning"); timer.start(); @@ -45,7 +45,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getOut().println("... finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.getWriter().println("... 
finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 8577d9e7a..8da1513a8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -47,9 +47,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (dataSource != null) { DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { - interpreter.getOut().println("Removed specified data source declaration."); + interpreter.getWriter().println("Removed specified data source declaration."); } else { - interpreter.getOut().println("Specified data source declaration not found in knowledge base."); + interpreter.getWriter().println("Specified data source declaration not found in knowledge base."); } } else { int count = 0; @@ -60,7 +60,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio count++; } } - interpreter.getOut().println("Removed " + count + " matching data source declaration(s)."); + interpreter.getWriter().println("Removed " + count + " matching data source declaration(s)."); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 88ef8b6a2..5680ae27b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ 
-50,7 +50,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getOut().println("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.getWriter().println("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index 2927d4e34..b854e0b4d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -30,7 +30,7 @@ public class ShowKbCommandInterpreter implements CommandInterpreter { public void run(Command command, Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 0); try { - interpreter.getKnowledgeBase().writeKnowledgeBase(interpreter.getOut()); + interpreter.getKnowledgeBase().writeKnowledgeBase(interpreter.getWriter()); } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java new file mode 100644 index 000000000..3c91a2218 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java @@ -0,0 +1,70 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintWriter; + +/** + * Simple implementation of {@link StyledPrinter} based on an arbitrary + * PrintWriter without any styling. + * + * @author Markus Kroetzsch + * + */ +public class SimpleStyledPrinter implements StyledPrinter { + + final PrintWriter printWriter; + + public SimpleStyledPrinter(final PrintWriter printWriter) { + this.printWriter = printWriter; + } + + @Override + public void printNormal(String string) { + printWriter.print(string); + } + + @Override + public void printSection(String string) { + printWriter.print(string); + } + + @Override + public void printEmph(String string) { + printWriter.print(string); + } + + @Override + public void printCode(String string) { + printWriter.print(string); + } + + @Override + public void printImportant(String string) { + printWriter.print(string); + } + + @Override + public PrintWriter getWriter() { + return printWriter; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java new file mode 100644 index 000000000..a3d73df78 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java @@ -0,0 +1,39 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintWriter; + +public interface StyledPrinter { + + void printNormal(String string); + + void printSection(String string); + + void printEmph(String string); + + void printCode(String string); + + void printImportant(String string); + + PrintWriter getWriter(); + +} From a0b3a7d2ca7d6c9bfbc6b7fa5db9c6a2e65eda16 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 17:57:48 +0200 Subject: [PATCH 0939/1255] fix formatting for command help --- .../rulewerk/commands/HelpCommandInterpreter.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index a06abf369..292a50b2a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -46,10 +46,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); if (interpreter.commandInterpreters.containsKey(helpCommand)) { interpreter.printCode("@" + helpCommand); - interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis()); - interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand)); + interpreter.printNormal(": " + 
interpreter.commandInterpreters.get(helpCommand).getSynopsis() + "\n"); + interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand) + "\n"); } else { - interpreter.printNormal("Command '" + helpCommand + "' not known."); + interpreter.printNormal("Command '" + helpCommand + "' not known.\n"); } } else { interpreter.printNormal(getHelp(command.getName())); From eaa98c8648014c8ae00d5f5e8823d00e2eed2014 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 18:16:47 +0200 Subject: [PATCH 0940/1255] remove accidentally committed changes --- .../implementation/QueryResultImpl.java | 150 ------------------ 1 file changed, 150 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 5207b1087..57231434a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -1,9 +1,5 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; -import java.util.Arrays; -import java.util.Collection; -import java.util.Iterator; - /* * #%L * Rulewerk Core Components @@ -25,7 +21,6 @@ */ import java.util.List; -import java.util.ListIterator; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; @@ -38,157 +33,12 @@ */ public final class QueryResultImpl implements QueryResult { - static class ShallowTermList implements List { - - final Term[] data; - - public ShallowTermList(Term[] data) { - this.data = data; - } - - UnsupportedOperationException uoe() { return new UnsupportedOperationException(); } - - @Override public boolean add(Term e) { throw uoe(); } - @Override public boolean addAll(Collection c) { throw uoe(); } - @Override 
public void clear() { throw uoe(); } - @Override public boolean remove(Object o) { throw uoe(); } - @Override public boolean removeAll(Collection c) { throw uoe(); } - @Override public boolean retainAll(Collection c) { throw uoe(); } - @Override public void add(int index, Term element) { throw uoe(); } - @Override public boolean addAll(int index, Collection c) { throw uoe(); } - @Override public Term remove(int index) { throw uoe(); } - - @Override - public boolean contains(Object o) { - return indexOf(o) >= 0; - } - - @Override - public boolean containsAll(Collection arg0) { - // TODO Auto-generated method stub - return false; - } - - @Override - public Term get(int index) { - return data[index]; - } - - @Override - public int indexOf(Object o) { - for (int i = 0, s = size(); i < s; i++) { - if (get(i).equals(o)) { - return i; - } - } - return -1; - } - - @Override - public boolean isEmpty() { - return size() == 0; - } - - @Override - public Iterator iterator() { - // TODO Auto-generated method stub - return null; - } - - @Override - public int lastIndexOf(Object arg0) { - // TODO Auto-generated method stub - return 0; - } - - @Override - public ListIterator listIterator() { - // TODO Auto-generated method stub - return null; - } - - @Override - public ListIterator listIterator(int arg0) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Term set(int arg0, Term arg1) { - throw uoe(); - } - - @Override - public int size() { - return data.length; - } - - @Override - public List subList(int arg0, int arg1) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Object[] toArray() { - return Arrays.copyOf(data, data.length); - } - - @Override - @SuppressWarnings("unchecked") - public T[] toArray(T[] a) { - int size = data.length; - if (a.length < size) { - // Make a new array of a's runtime type, but my contents: - return (T[]) Arrays.copyOf(data, size, a.getClass()); - } - System.arraycopy(data, 0, a, 0, size); - 
if (a.length > size) { - a[size] = null; // null-terminate - } - return a; - } - - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - - if (!(o instanceof List)) { - return false; - } - - Iterator oit = ((List) o).iterator(); - for (int i = 0, s = size(); i < s; i++) { - if (!oit.hasNext() || !get(i).equals(oit.next())) { - return false; - } - } - return !oit.hasNext(); - } - - @Override - public int hashCode() { - int hash = 1; - for (int i = 0, s = size(); i < s; i++) { - hash = 31 * hash + get(i).hashCode(); - } - return hash; - } - } - private final List terms; public QueryResultImpl(List terms) { this.terms = terms; } - public static QueryResultImpl fromArray(Term[] terms) { - return new QueryResultImpl(new ShallowTermList(terms)); - } - @Override public List getTerms() { return this.terms; From 0b6de9a700128f0ccc96b1544e139418bd45f803 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 21 Aug 2020 18:24:30 +0200 Subject: [PATCH 0941/1255] configure logging to avoid error message --- .../rulewerk/client/picocli/Main.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index e75ef7eb0..15a0d259f 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -22,6 +22,10 @@ import java.io.IOException; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; import org.semanticweb.rulewerk.client.shell.InteractiveShell; import picocli.CommandLine; @@ -39,6 +43,8 @@ public class Main { public static void main(final String[] args) throws IOException { + configureLogging(); + if (args.length == 0 || (args.length > 0 && 
args[0].equals("shell"))) { InteractiveShell.run(); } else { @@ -57,5 +63,19 @@ public static void main(final String[] args) throws IOException { } } + + public static void configureLogging() { + // Create the appender that will write log messages to the console. + final ConsoleAppender consoleAppender = new ConsoleAppender(); + // Define the pattern of log messages. + // Insert the string "%c{1}:%L" to also show class name and line. + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + consoleAppender.setLayout(new PatternLayout(pattern)); + // Change to Level.ERROR for fewer messages: + consoleAppender.setThreshold(Level.FATAL); + + consoleAppender.activateOptions(); + Logger.getRootLogger().addAppender(consoleAppender); + } } From 0d7828cc3fe15e16d59098f303b00b72bedfbdc1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 21 Aug 2020 18:52:41 +0200 Subject: [PATCH 0942/1255] add StringCompleter for command names --- .../rulewerk/client/shell/CommandReader.java | 199 ++- .../client/shell/DefaultConfiguration.java | 135 ++- .../client/shell/InteractiveShell.java | 2 +- .../rulewerk/commands/Interpreter.java | 407 ++++--- .../core/model/implementation/Serializer.java | 1073 +++++++++-------- 5 files changed, 939 insertions(+), 877 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 9877ea6fa..f3aeb72e6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -1,100 +1,99 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; -import org.jline.utils.AttributedString; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class CommandReader { - - private final LineReader lineReader; - private final PromptProvider promptProvider; - private final Interpreter interpreter; - - public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, - final Interpreter interpreter) { - super(); // FIXME: there is no superclass? - this.lineReader = lineReader; - this.promptProvider = promptProvider; - this.interpreter = interpreter; - } - - /** - * Reads a command from the prompt and returns a corresponding {@link Command} - * object. If no command should be executed, null is returned. Some effort is - * made to interpret mistyped commands by adding @ and . before and after the - * input, if forgotten. 
- * - * @return command or null - */ - public Command readCommand() { - String readLine; - try { - final AttributedString prompt = this.promptProvider.getPrompt(); - readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); - } catch (final UserInterruptException e) { - if (e.getPartialLine().isEmpty()) { - // Exit request from user CTRL+C - return ExitCommandInterpreter.EXIT_COMMAND; - } else { - return null; // used as empty command - } - } - - readLine = readLine.trim(); - if ("".equals(readLine)) { - return null; - } - if (readLine.charAt(0) != '@') { - readLine = "@" + readLine; - } - if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + "."; - } - - try { - return interpreter.parseCommand(readLine); - } catch (final ParsingException e) { - // FIXME do I need to flush terminal? - this.lineReader.getTerminal().writer() - .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); - return null; - } - } - -// /** -// * Sanitize the buffer input given the customizations applied to the JLine -// * parser (e.g. support for line continuations, etc.) -// */ -// static List sanitizeInput(List words) { -// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by -// // backslash continuation -// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string -// .collect(Collectors.toList()); -// return words; -// } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.jline.utils.AttributedString; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class CommandReader { + + private final LineReader lineReader; + private final PromptProvider promptProvider; + private final Interpreter interpreter; + + public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, + final Interpreter interpreter) { + this.lineReader = lineReader; + this.promptProvider = promptProvider; + this.interpreter = interpreter; + } + + /** + * Reads a command from the prompt and returns a corresponding {@link Command} + * object. If no command should be executed, null is returned. Some effort is + * made to interpret mistyped commands by adding @ and . before and after the + * input, if forgotten. 
+ * + * @return command or null + */ + public Command readCommand() { + String readLine; + try { + final AttributedString prompt = this.promptProvider.getPrompt(); + readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); + } catch (final UserInterruptException e) { + if (e.getPartialLine().isEmpty()) { + // Exit request from user CTRL+C + return ExitCommandInterpreter.EXIT_COMMAND; + } else { + return null; // used as empty command + } + } + + readLine = readLine.trim(); + if ("".equals(readLine)) { + return null; + } + if (readLine.charAt(0) != '@') { + readLine = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + readLine = readLine + "."; + } + + try { + return this.interpreter.parseCommand(readLine); + } catch (final ParsingException e) { + // FIXME do I need to flush terminal? + this.lineReader.getTerminal().writer() + .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + return null; + } + } + +// /** +// * Sanitize the buffer input given the customizations applied to the JLine +// * parser (e.g. support for line continuations, etc.) 
+// */ +// static List sanitizeInput(List words) { +// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by +// // backslash continuation +// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string +// .collect(Collectors.toList()); +// return words; +// } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 2b4085ac5..4cdc16ef7 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -1,60 +1,75 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.IOException; - -import org.jline.reader.LineReader; -import org.jline.reader.LineReaderBuilder; -import org.jline.terminal.Terminal; -import org.jline.terminal.TerminalBuilder; -import org.jline.utils.AttributedString; -import org.jline.utils.AttributedStyle; - -public final class DefaultConfiguration { - - private DefaultConfiguration() { - } - - public static PromptProvider buildPromptProvider() { - return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); - } - - public static LineReader buildLineReader(final Terminal terminal) { - final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) - .appName("Rulewerk Shell") - // .expander(expander()) - // .completer(buildCompleter()) - // .history(buildHistory()) - // .highlighter(buildHighlighter()) - ; - - final LineReader lineReader = lineReaderBuilder.build(); - lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than - // inserting a tab - return lineReader; - } - - public static Terminal buildTerminal() throws IOException { - return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.jline.reader.Completer; +import org.jline.reader.LineReader; +import org.jline.reader.LineReaderBuilder; +import org.jline.reader.impl.completer.StringsCompleter; +import org.jline.terminal.Terminal; +import org.jline.terminal.TerminalBuilder; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +public final class DefaultConfiguration { + + private DefaultConfiguration() { + } + + public static PromptProvider buildPromptProvider() { + return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); + } + + public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { + final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) + .appName("Rulewerk Shell") + .completer(buildCompleter(interpreter)) + // .expander(expander()) + // .history(buildHistory()) + // .highlighter(buildHighlighter()) + ; + + final LineReader lineReader = lineReaderBuilder.build(); + lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than + // inserting a tab + return lineReader; + } + + private static Completer buildCompleter(final Interpreter interpreter) { + final Set registeredCommandNames = interpreter.getRegisteredCommands(); + final List serializedCommandNames = registeredCommandNames.stream() + .map(commandName -> Serializer.getCommandName(commandName)) + .collect(Collectors.toList()); + return new StringsCompleter(serializedCommandNames); + } + + public static Terminal buildTerminal() throws IOException { + return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); + } + +} diff --git 
a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index a66fd7383..0bee3a90c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -50,7 +50,7 @@ public static void run() throws IOException { final Shell shell = new Shell(interpreter); - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 56843bad5..724b61271 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,201 +1,206 @@ -package org.semanticweb.rulewerk.commands; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.PrintWriter; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.nio.charset.StandardCharsets; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map.Entry; - -import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Terms; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.ParserConfiguration; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; -import org.semanticweb.rulewerk.parser.javacc.ParseException; -import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; - -public class Interpreter { - - final Reasoner reasoner; - final StyledPrinter printer; - final ParserConfiguration parserConfiguration; - - final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - - public Interpreter(Reasoner reasoner, StyledPrinter printer, ParserConfiguration parserConfiguration) { - this.reasoner = reasoner; - this.printer = printer; - this.parserConfiguration = parserConfiguration; - registerDefaultCommandInterpreters(); - } - - public void registerCommandInterpreter(String command, CommandInterpreter commandInterpreter) { - commandInterpreters.put(command, commandInterpreter); - } - - public void runCommands(List commands) throws CommandExecutionException { - for (Command command : commands) { - runCommand(command); - } - } - - public void runCommand(Command command) throws CommandExecutionException { - if (commandInterpreters.containsKey(command.getName())) { - try { - commandInterpreters.get(command.getName()).run(command, this); - } catch (Exception e) { - throw new CommandExecutionException(e.getMessage(), e); - 
} - } else { - throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); - } - } - - public Command parseCommand(String commandString) throws ParsingException { - final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); - final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); - localParser.setParserConfiguration(parserConfiguration); - - // Copy prefixes from KB: - try { - localParser.getPrefixDeclarationRegistry().setBaseIri(reasoner.getKnowledgeBase().getBaseIri()); - for (Entry prefix : reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()) { - localParser.getPrefixDeclarationRegistry().setPrefixIri(prefix.getKey(), prefix.getValue()); - } - } catch (PrefixDeclarationException e) { // unlikely! - throw new RuntimeException(e); - } - - Command result; - try { - result = localParser.command(); - localParser.ensureEndOfInput(); - } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { - throw new ParsingException("failed to parse command \"\"\"" + commandString + "\"\"\"", e); - } - return result; - } - - public Reasoner getReasoner() { - return reasoner; - } - - public KnowledgeBase getKnowledgeBase() { - return reasoner.getKnowledgeBase(); - } - - public ParserConfiguration getParserConfiguration() { - return parserConfiguration; - } - - public PrintWriter getWriter() { - return printer.getWriter(); - } - - public void printNormal(String string) { - printer.printNormal(string); - } - - public void printSection(String string) { - printer.printSection(string); - } - - public void printEmph(String string) { - printer.printEmph(string); - } - - public void printCode(String string) { - printer.printCode(string); - } - - public void printImportant(String string) { - printer.printImportant(string); - } - - private void registerDefaultCommandInterpreters() { - registerCommandInterpreter("help", new HelpCommandInterpreter()); - 
registerCommandInterpreter("load", new LoadCommandInterpreter()); - registerCommandInterpreter("assert", new AssertCommandInterpreter()); - registerCommandInterpreter("retract", new RetractCommandInterpreter()); - registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); - registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); - registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); - registerCommandInterpreter("reason", new ReasonCommandInterpreter()); - registerCommandInterpreter("query", new QueryCommandInterpreter()); - registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); - } - - /** - * Validate that the correct number of arguments was passed to a command. - * - * @param command Command to validate - * @param number expected number of parameters - * @throws CommandExecutionException if the number is not correct - */ - public static void validateArgumentCount(Command command, int number) throws CommandExecutionException { - if (command.getArguments().size() != number) { - throw new CommandExecutionException("This command requires exactly " + number + " argument(s), but " - + command.getArguments().size() + " were given."); - } - } - - private static CommandExecutionException getArgumentTypeError(int index, String expectedType, - String parameterName) { - return new CommandExecutionException( - "Argument at position " + index + " needs to be of type " + expectedType + " (" + parameterName + ")."); - } - - public static String extractStringArgument(Command command, int index, String parameterName) - throws CommandExecutionException { - try { - return Terms.extractString(command.getArguments().get(index).fromTerm() - .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); - } catch (IllegalArgumentException e) { - throw getArgumentTypeError(index, "string", parameterName); - } - } - - public static String extractNameArgument(Command command, int index, String 
parameterName) - throws CommandExecutionException { - try { - return Terms.extractName(command.getArguments().get(index).fromTerm() - .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); - } catch (IllegalArgumentException e) { - throw getArgumentTypeError(index, "constant", parameterName); - } - } - - public static PositiveLiteral extractPositiveLiteralArgument(Command command, int index, String parameterName) - throws CommandExecutionException { - return command.getArguments().get(index).fromPositiveLiteral() - .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); - } - -} +package org.semanticweb.rulewerk.commands; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.io.PrintWriter; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.nio.charset.StandardCharsets; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; +import org.semanticweb.rulewerk.parser.javacc.ParseException; +import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; + +public class Interpreter { + + final Reasoner reasoner; + final StyledPrinter printer; + final ParserConfiguration parserConfiguration; + + final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); + + public Interpreter(final Reasoner reasoner, final StyledPrinter printer, final ParserConfiguration parserConfiguration) { + this.reasoner = reasoner; + this.printer = printer; + this.parserConfiguration = parserConfiguration; + this.registerDefaultCommandInterpreters(); + } + + public void registerCommandInterpreter(final String command, final CommandInterpreter commandInterpreter) { + this.commandInterpreters.put(command, commandInterpreter); + } + + public Set getRegisteredCommands() { + return this.commandInterpreters.keySet(); + } + + public void runCommands(final List commands) throws CommandExecutionException { + for (final Command command : commands) { + this.runCommand(command); + } + } + + public void runCommand(final Command command) throws CommandExecutionException { + if (this.commandInterpreters.containsKey(command.getName())) { + try { + this.commandInterpreters.get(command.getName()).run(command, this); + } 
catch (final Exception e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } else { + throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); + } + } + + public Command parseCommand(final String commandString) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); + final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + localParser.setParserConfiguration(this.parserConfiguration); + + // Copy prefixes from KB: + try { + localParser.getPrefixDeclarationRegistry().setBaseIri(this.reasoner.getKnowledgeBase().getBaseIri()); + for (final Entry prefix : this.reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()) { + localParser.getPrefixDeclarationRegistry().setPrefixIri(prefix.getKey(), prefix.getValue()); + } + } catch (final PrefixDeclarationException e) { // unlikely! + throw new RuntimeException(e); + } + + Command result; + try { + result = localParser.command(); + localParser.ensureEndOfInput(); + } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { + throw new ParsingException("failed to parse command \"\"\"" + commandString + "\"\"\"", e); + } + return result; + } + + public Reasoner getReasoner() { + return this.reasoner; + } + + public KnowledgeBase getKnowledgeBase() { + return this.reasoner.getKnowledgeBase(); + } + + public ParserConfiguration getParserConfiguration() { + return this.parserConfiguration; + } + + public PrintWriter getWriter() { + return this.printer.getWriter(); + } + + public void printNormal(final String string) { + this.printer.printNormal(string); + } + + public void printSection(final String string) { + this.printer.printSection(string); + } + + public void printEmph(final String string) { + this.printer.printEmph(string); + } + + public void printCode(final String string) { + this.printer.printCode(string); + } + + public void printImportant(final 
String string) { + this.printer.printImportant(string); + } + + private void registerDefaultCommandInterpreters() { + this.registerCommandInterpreter("help", new HelpCommandInterpreter()); + this.registerCommandInterpreter("load", new LoadCommandInterpreter()); + this.registerCommandInterpreter("assert", new AssertCommandInterpreter()); + this.registerCommandInterpreter("retract", new RetractCommandInterpreter()); + this.registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); + this.registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); + this.registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + this.registerCommandInterpreter("reason", new ReasonCommandInterpreter()); + this.registerCommandInterpreter("query", new QueryCommandInterpreter()); + this.registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); + } + + /** + * Validate that the correct number of arguments was passed to a command. + * + * @param command Command to validate + * @param number expected number of parameters + * @throws CommandExecutionException if the number is not correct + */ + public static void validateArgumentCount(final Command command, final int number) throws CommandExecutionException { + if (command.getArguments().size() != number) { + throw new CommandExecutionException("This command requires exactly " + number + " argument(s), but " + + command.getArguments().size() + " were given."); + } + } + + private static CommandExecutionException getArgumentTypeError(final int index, final String expectedType, + final String parameterName) { + return new CommandExecutionException( + "Argument at position " + index + " needs to be of type " + expectedType + " (" + parameterName + ")."); + } + + public static String extractStringArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + try { + return 
Terms.extractString(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); + } catch (final IllegalArgumentException e) { + throw getArgumentTypeError(index, "string", parameterName); + } + } + + public static String extractNameArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + try { + return Terms.extractName(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); + } catch (final IllegalArgumentException e) { + throw getArgumentTypeError(index, "constant", parameterName); + } + } + + public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + return command.getArguments().get(index).fromPositiveLiteral() + .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 281c16cc6..7ae74f80b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,515 +1,558 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; - -/** - * A utility class with static methods to obtain the correct parsable string - * representation of the different data models. 
- * - * @author Ali Elhalawati - * - */ -public final class Serializer { - private static final String NEW_LINE = "\n"; - public static final String STATEMENT_SEPARATOR = " ."; - public static final String COMMA = ", "; - public static final String NEGATIVE_IDENTIFIER = "~"; - public static final String EXISTENTIAL_IDENTIFIER = "!"; - public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_:"; - public static final String OPENING_PARENTHESIS = "("; - public static final String CLOSING_PARENTHESIS = ")"; - public static final String OPENING_BRACKET = "["; - public static final String CLOSING_BRACKET = "]"; - public static final String RULE_SEPARATOR = " :- "; - public static final char AT = '@'; - public static final String DATA_SOURCE = "@source "; - public static final String BASE = "@base "; - public static final String PREFIX = "@prefix "; - public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String DATA_SOURCE_SEPARATOR = ": "; - public static final String COLON = ":"; - public static final String DOUBLE_CARET = "^^"; - public static final char LESS_THAN = '<'; - public static final char MORE_THAN = '>'; - public static final char QUOTE = '"'; - - public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String REGEX_INTEGER = "^[-+]?\\d+$"; - public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String REGEX_TRUE = "true"; - public static final String REGEX_FALSE = "false"; - - /** - * Constructor. - */ - private Serializer() { - - } - - /** - * Creates a String representation of a given {@link Rule}. - * - * @see Rule syntax - * @param rule a {@link Rule}. - * @return String representation corresponding to a given {@link Rule}. 
- * - */ - public static String getString(final Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link Conjunction}. - * - * @see Rule syntax - * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. - */ - public static String getString(final Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Literal}. - * - * @see Rule syntax - * @param literal a {@link Literal} - * @return String representation corresponding to a given {@link Literal}. - */ - public static String getString(final Literal literal) { - final StringBuilder stringBuilder = new StringBuilder(""); - if (literal.isNegated()) { - stringBuilder.append(NEGATIVE_IDENTIFIER); - } - stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Fact}. - * - * @see Rule syntax - * @param fact a {@link Fact} - * @return String representation corresponding to a given {@link Fact}. - */ - public static String getFactString(final Fact fact) { - return getString(fact) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule syntax - * @param constant a {@link AbstractConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link AbstractConstant}. 
- */ - public static String getString(final AbstractConstant constant, Function iriTransformer) { - return getIRIString(constant.getName(), iriTransformer); - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule syntax - * @param constant a {@link AbstractConstant} - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant) { - return getIRIString(constant.getName()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - * - * @see Rule syntax - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - */ - public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
    - *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • - *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • - *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • - *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • - *
  • {@code "test"^^} results in - * {@code "test"^^}, modulo transformation of the datatype - * IRI.
  • - *
- * - * @see Rule syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant, Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { - return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } - - return getConstantName(datatypeConstant, iriTransformer); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
    - *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • - *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • - *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • - *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • - *
  • {@code "test"^^} results in - * {@code "test"^^}.
  • - *
- * - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant, Function.identity()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - private static String getConstantName(final DatatypeConstant datatypeConstant, - Function iriTransformer) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + addAngleBrackets(datatypeConstant.getDatatype()); - } - - /** - * Creates a String representation of a given {@link ExistentialVariable}. - * - * @see Rule syntax - * @param existentialVariable a {@link ExistentialVariable} - * @return String representation corresponding to a given - * {@link ExistentialVariable}. - */ - public static String getString(final ExistentialVariable existentialVariable) { - return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); - } - - /** - * Creates a String representation of a given {@link UniversalVariable}. 
- * - * @see Rule syntax - * @param universalVariable a {@link UniversalVariable} - * @return String representation corresponding to a given - * {@link UniversalVariable}. - */ - public static String getString(final UniversalVariable universalVariable) { - return UNIVERSAL_IDENTIFIER + universalVariable.getName(); - } - - /** - * Creates a String representation of a given {@link NamedNull}. - * - * @see Rule syntax - * @param namedNull a {@link NamedNull} - * @return String representation corresponding to a given {@link NamedNull}. - */ - public static String getString(final NamedNull namedNull) { - return NAMEDNULL_IDENTIFIER + namedNull.getName(); - } - - /** - * Creates a String representation of a given {@link Predicate}. - * - * @see Rule syntax - * @param predicate a {@link Predicate} - * @return String representation corresponding to a given {@link Predicate}. - */ - public static String getString(final Predicate predicate) { - return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; - } - - /** - * Creates a String representation of a given {@link DataSourceDeclaration}. - * - * @see Rule syntax - * @param dataSourceDeclaration a {@link DataSourceDeclaration} - * @return String representation corresponding to a given - * {@link DataSourceDeclaration}. - */ - public static String getString(final DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR - + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link CsvFileDataSource}. - * - * @see Rule syntax - * - * @param csvFileDataSource - * @return String representation corresponding to a given - * {@link CsvFileDataSource}. 
- */ - public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given {@link RdfFileDataSource}. - * - * @see Rule syntax - * - * - * @param rdfFileDataSource - * @return String representation corresponding to a given - * {@link RdfFileDataSource}. - */ - public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given - * {@link SparqlQueryResultDataSource}. - * - * @see Rule syntax - * - * - * @param dataSource - * @return String representation corresponding to a given - * {@link SparqlQueryResultDataSource}. - */ - public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS - + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA - + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS; - } - - private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getPath()); - } - - private static String getIRIString(final String string) { - return getIRIString(string, Function.identity()); - } - - private static String getIRIString(final String string, Function iriTransformer) { - String transformed = iriTransformer.apply(string); - - if (!transformed.equals(string)) { - return transformed; - } - - if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) - || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { - return addAngleBrackets(string); - } - - return string; - } - - /** - * Constructs the parseable, serialized representation of given 
{@code string}. - * Escapes (with {@code \}) special character occurrences in given - * {@code string}, and surrounds the result with double quotation marks - * ({@code "}). The special characters are: - *
    - *
  • {@code \}
  • - *
  • {@code "}
  • - *
  • {@code \t}
  • - *
  • {@code \b}
  • - *
  • {@code \n}
  • - *
  • {@code \r}
  • - *
  • {@code \f}
  • - *
- * Example for {@code string = "\\a"}, the returned value is - * {@code string = "\"\\\\a\""} - * - * @param string - * @return an escaped string surrounded by {@code "}. - */ - public static String getString(final String string) { - return addQuotes(escape(string)); - } - - /** - * Escapes (with {@code \}) special character occurrences in given - * {@code string}. The special characters are: - *
    - *
  • {@code \}
  • - *
  • {@code "}
  • - *
  • {@code \t}
  • - *
  • {@code \b}
  • - *
  • {@code \n}
  • - *
  • {@code \r}
  • - *
  • {@code \f}
  • - *
- * - * @param string - * @return an escaped string - */ - private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); - // don't touch single quotes here since we only construct double-quoted strings - } - - private static String addQuotes(final String string) { - return QUOTE + string + QUOTE; - } - - private static String addAngleBrackets(final String string) { - return LESS_THAN + string + MORE_THAN; - } - - public static String getFactString(Predicate predicate, List terms) { - return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getFactString(Predicate predicate, List terms, Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getString(Predicate predicate, List terms) { - return getString(predicate, terms, Function.identity()); - } - - public static String getString(Predicate predicate, List terms, Function iriTransformer) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); - stringBuilder.append(OPENING_PARENTHESIS); - - boolean first = true; - for (final Term term : terms) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getSyntacticRepresentation(iriTransformer); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); - } - - public static String getBaseString(KnowledgeBase knowledgeBase) { - String baseIri = knowledgeBase.getBaseIri(); - - return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? 
baseIri : getBaseDeclarationString(baseIri); - } - - private static String getBaseDeclarationString(String baseIri) { - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getPrefixString(Entry prefix) { - return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getBaseAndPrefixDeclarations(KnowledgeBase knowledgeBase) { - StringBuilder sb = new StringBuilder(); - - sb.append(getBaseString(knowledgeBase)); - knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); - - return sb.toString(); - } -} +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.List; +import java.util.Map.Entry; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +/** + * A utility class with static methods to obtain the correct parsable string + * representation of the different data models. 
+ * + * @author Ali Elhalawati + * + */ +public final class Serializer { + private static final String NEW_LINE = "\n"; + public static final String STATEMENT_SEPARATOR = " ."; + public static final String COMMA = ", "; + public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String EXISTENTIAL_IDENTIFIER = "!"; + public static final String UNIVERSAL_IDENTIFIER = "?"; + public static final String NAMEDNULL_IDENTIFIER = "_:"; + public static final String OPENING_PARENTHESIS = "("; + public static final String CLOSING_PARENTHESIS = ")"; + public static final String OPENING_BRACKET = "["; + public static final String CLOSING_BRACKET = "]"; + public static final String RULE_SEPARATOR = " :- "; + public static final char AT = '@'; + public static final String DATA_SOURCE = "@source "; + public static final String BASE = "@base "; + public static final String PREFIX = "@prefix "; + public static final String CSV_FILE_DATA_SOURCE = "load-csv"; + public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; + public static final String DATA_SOURCE_SEPARATOR = ": "; + public static final String COLON = ":"; + public static final String DOUBLE_CARET = "^^"; + public static final char LESS_THAN = '<'; + public static final char MORE_THAN = '>'; + public static final char QUOTE = '"'; + + public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String REGEX_INTEGER = "^[-+]?\\d+$"; + public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String REGEX_TRUE = "true"; + public static final String REGEX_FALSE = "false"; + + /** + * Constructor. + */ + private Serializer() { + + } + + /** + * Creates a String representation of a given {@link Rule}. + * + * @see Rule + * syntax + * @param rule a {@link Rule}. + * @return String representation corresponding to a given {@link Rule}. 
+ * + */ + public static String getString(final Rule rule) { + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link Conjunction}. + * + * @see Rule + * syntax + * @param conjunction a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. + */ + public static String getString(final Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + stringBuilder.append(getString(literal)); + } + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Literal}. + * + * @see Rule + * syntax + * @param literal a {@link Literal} + * @return String representation corresponding to a given {@link Literal}. + */ + public static String getString(final Literal literal) { + final StringBuilder stringBuilder = new StringBuilder(""); + if (literal.isNegated()) { + stringBuilder.append(NEGATIVE_IDENTIFIER); + } + stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Fact}. + * + * @see Rule + * syntax + * @param fact a {@link Fact} + * @return String representation corresponding to a given {@link Fact}. + */ + public static String getFactString(final Fact fact) { + return getString(fact) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link AbstractConstant}. 
+ */ + public static String getString(final AbstractConstant constant, final Function iriTransformer) { + return getIRIString(constant.getName(), iriTransformer); + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @return String representation corresponding to a given + * {@link AbstractConstant}. + */ + public static String getString(final AbstractConstant constant) { + return getIRIString(constant.getName()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + * + * @see Rule + * syntax + * @param languageStringConstant a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(final LanguageStringConstant languageStringConstant) { + return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
    + *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • + *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • + *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • + *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • + *
  • {@code "test"^^} results in + * {@code "test"^^}, modulo transformation of the datatype + * IRI.
  • + *
+ * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { + return getString(datatypeConstant.getLexicalValue()); + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); + } + + return getConstantName(datatypeConstant, iriTransformer); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
    + *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • + *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • + *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • + *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • + *
  • {@code "test"^^} results in + * {@code "test"^^}.
  • + *
+ * + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant, Function.identity()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + private static String getConstantName(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + addAngleBrackets(datatypeConstant.getDatatype()); + } + + /** + * Creates a String representation of a given {@link ExistentialVariable}. + * + * @see Rule + * syntax + * @param existentialVariable a {@link ExistentialVariable} + * @return String representation corresponding to a given + * {@link ExistentialVariable}. + */ + public static String getString(final ExistentialVariable existentialVariable) { + return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); + } + + /** + * Creates a String representation of a given {@link UniversalVariable}. 
+ * + * @see Rule + * syntax + * @param universalVariable a {@link UniversalVariable} + * @return String representation corresponding to a given + * {@link UniversalVariable}. + */ + public static String getString(final UniversalVariable universalVariable) { + return UNIVERSAL_IDENTIFIER + universalVariable.getName(); + } + + /** + * Creates a String representation of a given {@link NamedNull}. + * + * @see Rule + * syntax + * @param namedNull a {@link NamedNull} + * @return String representation corresponding to a given {@link NamedNull}. + */ + public static String getString(final NamedNull namedNull) { + return NAMEDNULL_IDENTIFIER + namedNull.getName(); + } + + /** + * Creates a String representation of a given {@link Predicate}. + * + * @see Rule + * syntax + * @param predicate a {@link Predicate} + * @return String representation corresponding to a given {@link Predicate}. + */ + public static String getString(final Predicate predicate) { + return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; + } + + /** + * Creates a String representation of a given {@link DataSourceDeclaration}. + * + * @see Rule + * syntax + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @return String representation corresponding to a given + * {@link DataSourceDeclaration}. + */ + public static String getString(final DataSourceDeclaration dataSourceDeclaration) { + return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR + + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link CsvFileDataSource}. + * + * @see Rule + * syntax + * + * @param csvFileDataSource + * @return String representation corresponding to a given + * {@link CsvFileDataSource}. 
+ */ + public static String getString(final CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given {@link RdfFileDataSource}. + * + * @see Rule + * syntax + * + * + * @param rdfFileDataSource + * @return String representation corresponding to a given + * {@link RdfFileDataSource}. + */ + public static String getString(final RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given + * {@link SparqlQueryResultDataSource}. + * + * @see Rule + * syntax + * + * + * @param dataSource + * @return String representation corresponding to a given + * {@link SparqlQueryResultDataSource}. + */ + public static String getString(final SparqlQueryResultDataSource dataSource) { + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS + + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + + CLOSING_PARENTHESIS; + } + + private static String getFileString(final FileDataSource fileDataSource) { + return getString(fileDataSource.getPath()); + } + + private static String getIRIString(final String string) { + return getIRIString(string, Function.identity()); + } + + private static String getIRIString(final String string, final Function iriTransformer) { + final String transformed = iriTransformer.apply(string); + + if (!transformed.equals(string)) { + return transformed; + } + + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { + return addAngleBrackets(string); + } + + return string; + } + + /** + * Constructs the parseable, serialized 
representation of given {@code string}. + * Escapes (with {@code \}) special character occurrences in given + * {@code string}, and surrounds the result with double quotation marks + * ({@code "}). The special characters are: + *
    + *
  • {@code \}
  • + *
  • {@code "}
  • + *
  • {@code \t}
  • + *
  • {@code \b}
  • + *
  • {@code \n}
  • + *
  • {@code \r}
  • + *
  • {@code \f}
  • + *
+ * Example for {@code string = "\\a"}, the returned value is + * {@code string = "\"\\\\a\""} + * + * @param string + * @return an escaped string surrounded by {@code "}. + */ + public static String getString(final String string) { + return addQuotes(escape(string)); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
    + *
  • {@code \}
  • + *
  • {@code "}
  • + *
  • {@code \t}
  • + *
  • {@code \b}
  • + *
  • {@code \n}
  • + *
  • {@code \r}
  • + *
  • {@code \f}
  • + *
+ * + * @param string + * @return an escaped string + */ + private static String escape(final String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); + // don't touch single quotes here since we only construct double-quoted strings + } + + private static String addQuotes(final String string) { + return QUOTE + string + QUOTE; + } + + private static String addAngleBrackets(final String string) { + return LESS_THAN + string + MORE_THAN; + } + + public static String getFactString(final Predicate predicate, final List terms) { + return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getFactString(final Predicate predicate, final List terms, + final Function iriTransformer) { + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getString(final Predicate predicate, final List terms) { + return getString(predicate, terms, Function.identity()); + } + + public static String getString(final Predicate predicate, final List terms, + final Function iriTransformer) { + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); + stringBuilder.append(OPENING_PARENTHESIS); + + boolean first = true; + for (final Term term : terms) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + final String string = term.getSyntacticRepresentation(iriTransformer); + stringBuilder.append(string); + } + stringBuilder.append(CLOSING_PARENTHESIS); + return stringBuilder.toString(); + } + + public static String getBaseString(final KnowledgeBase knowledgeBase) { + final String baseIri = knowledgeBase.getBaseIri(); + + return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? 
baseIri : getBaseDeclarationString(baseIri); + } + + private static String getBaseDeclarationString(final String baseIri) { + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getPrefixString(final Entry prefix) { + return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { + final StringBuilder sb = new StringBuilder(); + + sb.append(getBaseString(knowledgeBase)); + knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); + + return sb.toString(); + } + + public static String getCommandName(final String commandName) { + return AT + commandName; + } +} From 6b792094b13b9c7fc6f2a5c4390626bcbae8a3d4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 21 Aug 2020 21:07:29 +0200 Subject: [PATCH 0943/1255] Disable warnings when building VLog in CI --- build-vlog-library.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/build-vlog-library.sh b/build-vlog-library.sh index c1d915137..ff059801f 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -15,7 +15,13 @@ else # git pull mkdir build cd build - cmake -DJAVA=1 -DSPARQL=1 .. + if [ "x${CI}" = "xtrue" ] + then + # disable warnings when running in CI to keep travis logs short + cmake -DJAVA=1 -DSPARQL=1 -DCMAKE_CXX_FLAGS=-w .. + else + cmake -DJAVA=1 -DSPARQL=1 .. + fi make cp jvlog.jar ../../../local_builds/jvlog.jar cd ../../.. 
From b3aa1f63bd73e482ea9e63aa3062c87b2166996c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 14:34:14 +0200 Subject: [PATCH 0944/1255] More efficient IRI unresolving --- .../model/api/PrefixDeclarationRegistry.java | 3 +- .../AbstractPrefixDeclarationRegistry.java | 29 +++++++++++++ .../MergingPrefixDeclarationRegistry.java | 43 +------------------ .../MergingPrefixDeclarationRegistryTest.java | 13 +++--- 4 files changed, 40 insertions(+), 48 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index a3ac69b9c..3fdd3b939 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -91,7 +91,7 @@ public interface PrefixDeclarationRegistry extends Iterableprefixed * name into an absolute IRI. 
@@ -114,4 +114,5 @@ public interface PrefixDeclarationRegistry extends Iterable baseIri.length() + && iri.startsWith(baseIri)) { + shortestIri = iri.substring(baseIri.length()); + } + + for (Map.Entry entry : prefixes.entrySet()) { + int localNameLength = iri.length() - entry.getValue().length(); + if (localNameLength > 0 && shortestIri.length() > localNameLength + entry.getKey().length() + && iri.startsWith(entry.getValue())) { + shortestIri = entry.getKey() + iri.substring(entry.getValue().length()); + } + } + + return shortestIri; + } @Override public Iterator> iterator() { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index c9104a280..5dade8819 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -20,11 +20,6 @@ * #L% */ -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import java.util.Map.Entry; import org.apache.commons.lang3.Validate; @@ -47,7 +42,7 @@ final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclar /** * Template string to use for generated prefix name */ - private static final String GENERATED_PREFIX_TEMPLATE = "rulewerk_generated_%d" + private static final String GENERATED_PREFIX_TEMPLATE = "rw_gen%d" + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; public MergingPrefixDeclarationRegistry() { @@ -99,42 +94,6 @@ public void setPrefixIri(String prefixName, String prefixIri) { prefixes.put(name, prefixIri); } - /** - * Turn an absolute Iri into a (possibly) prefixed name. 
Dual to - * {@link AbstractPrefixDeclarationRegistry#resolvePrefixedName}. - * - * @param iri an absolute Iri to abbreviate. - * - * @return an abbreviated form of {@code iri} if an appropriate prefix is known, - * or {@code iri}. - */ - public String unresolveAbsoluteIri(String iri) { - Map matches = new HashMap<>(); - String baseIri = getBaseIri(); - - if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseIri) && !iri.equals(baseIri)) { - matches.put(iri.replaceFirst(baseIri, PrefixDeclarationRegistry.EMPTY_BASE), baseIri.length()); - } - - prefixes.forEach((prefixName, prefixIri) -> { - // only select proper prefixes here, since `eg:` is not a valid prefixed name. - if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { - matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); - } - }); - - List matchesByLength = new ArrayList<>(matches.keySet()); - // reverse order, so we get the longest match first - matchesByLength.sort(Comparator.comparing(matches::get).reversed()); - - if (matchesByLength.size() > 0) { - return matchesByLength.get(0); - } else { - // no matching prefix - return iri; - } - } - /** * Merge another set of prefix declarations. 
* diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 812385669..9bf049824 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -104,7 +104,7 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce @Test public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { - String prefix = "rulewerk_generated_"; + String prefix = "rw_gen"; prefixDeclarations.setPrefixIri(prefix + "0:", BASE + "generated/"); prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); @@ -128,7 +128,7 @@ public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPref prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); assertEquals(BASE, this.prefixDeclarations.getPrefixIri("eg:")); - assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("rulewerk_generated_0:")); + assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("rw_gen0:")); } @Test @@ -188,7 +188,8 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref } @Test - public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative() throws PrefixDeclarationException { + public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative() + throws PrefixDeclarationException { String relativeIri = this.prefixDeclarations.absolutizeIri(RELATIVE); PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(BASE); @@ -198,13 +199,15 @@ public void 
unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative( @Test public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() throws PrefixDeclarationException { - assertEquals("", this.prefixDeclarations.getBaseIri()); + assertEquals("", this.prefixDeclarations.getBaseIri()); // FIXME: why test this? + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(BASE); String absoluteIri = prefixDeclarations.absolutizeIri(RELATIVE); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri); + assertNotEquals(RELATIVE, resolvedIri); - assertEquals("rulewerk_generated_0:" + RELATIVE, resolvedIri); + assertEquals("rw_gen0:" + RELATIVE, resolvedIri); } } From da8af909920e461aaf3d582c070ccfe0423ed225 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 14:38:56 +0200 Subject: [PATCH 0945/1255] generally support IRI unresolving --- .../core/model/api/PrefixDeclarationRegistry.java | 10 ++++++++++ .../AbstractPrefixDeclarationRegistry.java | 14 +++----------- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 3fdd3b939..9727b6e1b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -115,4 +115,14 @@ public interface PrefixDeclarationRegistry extends Iterable Date: Sat, 22 Aug 2020 17:25:50 +0200 Subject: [PATCH 0946/1255] avoid ambiguous abbreviated IRIs do not use local names like "123" or "true" --- .../implementation/AbstractPrefixDeclarationRegistry.java | 6 ++++++ 1 file changed, 6 insertions(+) diff 
--git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index b23166975..8186347b6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -104,6 +104,12 @@ public String unresolveAbsoluteIri(String iri) { if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.length() > baseIri.length() && iri.startsWith(baseIri)) { shortestIri = iri.substring(baseIri.length()); + // Only allow very simple names of this form, to avoid confusion, e.g., with + // numbers or boolean literals: + if (!shortestIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") || "true".equals(shortestIri) + || "false".equals(shortestIri)) { + shortestIri = iri; + } } for (Map.Entry entry : prefixes.entrySet()) { From a8faf2e516cdc4def2672da8eae18e482836678d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:05:54 +0200 Subject: [PATCH 0947/1255] Prepare new Serializer class --- .../client/shell/DefaultConfiguration.java | 4 +- .../core/model/api/AbstractConstant.java | 6 +- .../rulewerk/core/model/api/Command.java | 8 +- .../rulewerk/core/model/api/Conjunction.java | 4 +- .../rulewerk/core/model/api/DataSource.java | 9 + .../core/model/api/DataSourceDeclaration.java | 4 +- .../core/model/api/DatatypeConstant.java | 6 +- .../core/model/api/ExistentialVariable.java | 4 +- .../rulewerk/core/model/api/Fact.java | 4 +- .../model/api/LanguageStringConstant.java | 4 +- .../rulewerk/core/model/api/Literal.java | 4 +- .../rulewerk/core/model/api/NamedNull.java | 4 +- .../rulewerk/core/model/api/Predicate.java | 4 +- .../model/api/PrefixDeclarationRegistry.java | 6 +- .../rulewerk/core/model/api/Rule.java | 4 +- 
.../core/model/api/UniversalVariable.java | 4 +- .../AbstractPrefixDeclarationRegistry.java | 12 +- .../implementation/DatatypeConstantImpl.java | 2 +- .../LanguageStringConstantImpl.java | 2 +- .../core/model/implementation/Serializer.java | 1020 ++++++++--------- .../rulewerk/core/reasoner/KnowledgeBase.java | 12 +- .../rulewerk/core/reasoner/Reasoner.java | 6 +- .../implementation/CsvFileDataSource.java | 10 +- .../implementation/FileDataSource.java | 19 + .../implementation/QueryResultImpl.java | 2 +- .../implementation/RdfFileDataSource.java | 9 +- .../SparqlQueryResultDataSource.java | 25 +- .../core/model/DataSourceDeclarationTest.java | 6 +- .../MergingPrefixDeclarationRegistryTest.java | 26 +- .../rulewerk/core/model/SerializerTest.java | 214 ++++ .../parser/DefaultParserConfiguration.java | 8 +- 31 files changed, 812 insertions(+), 640 deletions(-) create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 4cdc16ef7..927c61701 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -34,7 +34,7 @@ import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; public final class DefaultConfiguration { @@ -63,7 +63,7 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); 
final List serializedCommandNames = registeredCommandNames.stream() - .map(commandName -> Serializer.getCommandName(commandName)) + .map(commandName -> OldSerializer.getCommandName(commandName)) .collect(Collectors.toList()); return new StringsCompleter(serializedCommandNames); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 750434b73..70592a3e6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for abstract constants, i.e. for constants that represent an @@ -40,11 +40,11 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override default String getSyntacticRepresentation(Function iriTransformer) { - return Serializer.getString(this, iriTransformer); + return OldSerializer.getString(this, iriTransformer); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 3af723089..fce5aa32a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Class for representing a generic command that can be executed. 
@@ -72,15 +72,15 @@ public String getSyntacticRepresentation() { result.append(" "); if (argument.fromRule().isPresent()) { Rule rule = argument.fromRule().get(); - result.append(Serializer.getString(rule.getHead())).append(Serializer.RULE_SEPARATOR) - .append(Serializer.getString(rule.getBody())); + result.append(OldSerializer.getString(rule.getHead())).append(OldSerializer.RULE_SEPARATOR) + .append(OldSerializer.getString(rule.getBody())); } else if (argument.fromPositiveLiteral().isPresent()) { result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); } else { throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); } } - result.append(Serializer.STATEMENT_SEPARATOR); + result.append(OldSerializer.STATEMENT_SEPARATOR); return result.toString(); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index be4e9f9ee..2b7511580 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of @@ -43,7 +43,7 @@ public interface Conjunction extends Iterable, SyntaxObjec @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java index 402d80127..f8afe3ee8 100644 --- 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java @@ -40,4 +40,13 @@ public default Optional getRequiredArity() { return Optional.empty(); } + /** + * Returns a fact that represents the declaration of this {@link DataSource}. + * Rulewerk syntax uses facts to specify the relevant parameters for data source + * declarations. + * + * @return {@link Fact} that contains the parameters of this data source + */ + public Fact getDeclarationFact(); + } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index da1837bba..c609e1a03 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -1,6 +1,6 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /*- * #%L @@ -47,6 +47,6 @@ public interface DataSourceDeclaration extends Statement, Entity { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index 28fbc87d5..8f4f7c414 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import 
org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for datatype constants, i.e. for constants that represent a @@ -64,11 +64,11 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override default String getSyntacticRepresentation(Function iriTransformer) { - return Serializer.getString(this, iriTransformer); + return OldSerializer.getString(this, iriTransformer); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 4e7d60d78..35f08ab39 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for existentially quantified variables, i.e., variables that appear @@ -37,6 +37,6 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index 61a302e32..5b1fc9df2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -1,6 +1,6 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import 
org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /*- * #%L @@ -33,7 +33,7 @@ public interface Fact extends PositiveLiteral, Statement { @Override default String getSyntacticRepresentation() { - return Serializer.getFactString(this); + return OldSerializer.getFactString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index fbd60d57b..2d7d082d5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for string constants with a language tag, used to represent values @@ -67,7 +67,7 @@ default String getDatatype() { @Override default String getSyntacticRepresentation() { - return Serializer.getConstantName(this); + return OldSerializer.getConstantName(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index b278f7722..d40d3a04a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for literals. 
A positive literal is simply an atomic formula, i.e., @@ -54,7 +54,7 @@ public interface Literal extends SyntaxObject, Entity { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index 4b1350265..b78a6be6d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface for {@link TermType#NAMED_NULL} terms. A blank is an entity used to @@ -40,7 +40,7 @@ default TermType getType() { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index e34ec9b24..5ff3a4daa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A Predicate represents a relation between terms. 
Is uniquely identified by @@ -51,6 +51,6 @@ public interface Predicate extends Entity { @Override default String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 9727b6e1b..fd82ade16 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -119,10 +119,12 @@ public interface PrefixDeclarationRegistry extends Iterable" : iri; String baseIri = getBaseIri(); if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.length() > baseIri.length() && iri.startsWith(baseIri)) { - shortestIri = iri.substring(baseIri.length()); + String shorterIri = iri.substring(baseIri.length()); // Only allow very simple names of this form, to avoid confusion, e.g., with // numbers or boolean literals: - if (!shortestIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") || "true".equals(shortestIri) - || "false".equals(shortestIri)) { - shortestIri = iri; + if (shorterIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") && !"true".equals(shorterIri) + || !"false".equals(shorterIri)) { + shortestIri = shorterIri; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index f81e44681..f58e13002 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -65,7 +65,7 @@ public String toString() { @Override public String getName() { - 
return Serializer.getConstantName(this); + return OldSerializer.getConstantName(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 235bfcd31..8a7ae5ca1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public String getName() { - return Serializer.getConstantName(this); + return OldSerializer.getConstantName(this); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 7ae74f80b..23d4c238c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,558 +1,462 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; - -/** - * A utility class with static methods to obtain the correct parsable string - * representation of the different data models. 
- * - * @author Ali Elhalawati - * - */ -public final class Serializer { - private static final String NEW_LINE = "\n"; - public static final String STATEMENT_SEPARATOR = " ."; - public static final String COMMA = ", "; - public static final String NEGATIVE_IDENTIFIER = "~"; - public static final String EXISTENTIAL_IDENTIFIER = "!"; - public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_:"; - public static final String OPENING_PARENTHESIS = "("; - public static final String CLOSING_PARENTHESIS = ")"; - public static final String OPENING_BRACKET = "["; - public static final String CLOSING_BRACKET = "]"; - public static final String RULE_SEPARATOR = " :- "; - public static final char AT = '@'; - public static final String DATA_SOURCE = "@source "; - public static final String BASE = "@base "; - public static final String PREFIX = "@prefix "; - public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String DATA_SOURCE_SEPARATOR = ": "; - public static final String COLON = ":"; - public static final String DOUBLE_CARET = "^^"; - public static final char LESS_THAN = '<'; - public static final char MORE_THAN = '>'; - public static final char QUOTE = '"'; - - public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String REGEX_INTEGER = "^[-+]?\\d+$"; - public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String REGEX_TRUE = "true"; - public static final String REGEX_FALSE = "false"; - - /** - * Constructor. - */ - private Serializer() { - - } - - /** - * Creates a String representation of a given {@link Rule}. - * - * @see Rule - * syntax - * @param rule a {@link Rule}. - * @return String representation corresponding to a given {@link Rule}. 
- * - */ - public static String getString(final Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link Conjunction}. - * - * @see Rule - * syntax - * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. - */ - public static String getString(final Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Literal}. - * - * @see Rule - * syntax - * @param literal a {@link Literal} - * @return String representation corresponding to a given {@link Literal}. - */ - public static String getString(final Literal literal) { - final StringBuilder stringBuilder = new StringBuilder(""); - if (literal.isNegated()) { - stringBuilder.append(NEGATIVE_IDENTIFIER); - } - stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Fact}. - * - * @see Rule - * syntax - * @param fact a {@link Fact} - * @return String representation corresponding to a given {@link Fact}. - */ - public static String getFactString(final Fact fact) { - return getString(fact) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link AbstractConstant}. 
- */ - public static String getString(final AbstractConstant constant, final Function iriTransformer) { - return getIRIString(constant.getName(), iriTransformer); - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant) { - return getIRIString(constant.getName()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - * - * @see Rule - * syntax - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - */ - public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
    - *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • - *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • - *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • - *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • - *
  • {@code "test"^^} results in - * {@code "test"^^}, modulo transformation of the datatype - * IRI.
  • - *
- * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { - return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } - - return getConstantName(datatypeConstant, iriTransformer); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
    - *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • - *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • - *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • - *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • - *
  • {@code "test"^^} results in - * {@code "test"^^}.
  • - *
- * - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant, Function.identity()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - private static String getConstantName(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + addAngleBrackets(datatypeConstant.getDatatype()); - } - - /** - * Creates a String representation of a given {@link ExistentialVariable}. - * - * @see Rule - * syntax - * @param existentialVariable a {@link ExistentialVariable} - * @return String representation corresponding to a given - * {@link ExistentialVariable}. - */ - public static String getString(final ExistentialVariable existentialVariable) { - return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); - } - - /** - * Creates a String representation of a given {@link UniversalVariable}. 
- * - * @see Rule - * syntax - * @param universalVariable a {@link UniversalVariable} - * @return String representation corresponding to a given - * {@link UniversalVariable}. - */ - public static String getString(final UniversalVariable universalVariable) { - return UNIVERSAL_IDENTIFIER + universalVariable.getName(); - } - - /** - * Creates a String representation of a given {@link NamedNull}. - * - * @see Rule - * syntax - * @param namedNull a {@link NamedNull} - * @return String representation corresponding to a given {@link NamedNull}. - */ - public static String getString(final NamedNull namedNull) { - return NAMEDNULL_IDENTIFIER + namedNull.getName(); - } - - /** - * Creates a String representation of a given {@link Predicate}. - * - * @see Rule - * syntax - * @param predicate a {@link Predicate} - * @return String representation corresponding to a given {@link Predicate}. - */ - public static String getString(final Predicate predicate) { - return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; - } - - /** - * Creates a String representation of a given {@link DataSourceDeclaration}. - * - * @see Rule - * syntax - * @param dataSourceDeclaration a {@link DataSourceDeclaration} - * @return String representation corresponding to a given - * {@link DataSourceDeclaration}. - */ - public static String getString(final DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR - + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link CsvFileDataSource}. - * - * @see Rule - * syntax - * - * @param csvFileDataSource - * @return String representation corresponding to a given - * {@link CsvFileDataSource}. 
- */ - public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given {@link RdfFileDataSource}. - * - * @see Rule - * syntax - * - * - * @param rdfFileDataSource - * @return String representation corresponding to a given - * {@link RdfFileDataSource}. - */ - public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given - * {@link SparqlQueryResultDataSource}. - * - * @see Rule - * syntax - * - * - * @param dataSource - * @return String representation corresponding to a given - * {@link SparqlQueryResultDataSource}. - */ - public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS - + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA - + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS; - } - - private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getPath()); - } - - private static String getIRIString(final String string) { - return getIRIString(string, Function.identity()); - } - - private static String getIRIString(final String string, final Function iriTransformer) { - final String transformed = iriTransformer.apply(string); - - if (!transformed.equals(string)) { - return transformed; - } - - if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) - || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { - return addAngleBrackets(string); - } - - return string; - } - - /** - * Constructs the parseable, serialized 
representation of given {@code string}. - * Escapes (with {@code \}) special character occurrences in given - * {@code string}, and surrounds the result with double quotation marks - * ({@code "}). The special characters are: - *
    - *
  • {@code \}
  • - *
  • {@code "}
  • - *
  • {@code \t}
  • - *
  • {@code \b}
  • - *
  • {@code \n}
  • - *
  • {@code \r}
  • - *
  • {@code \f}
  • - *
- * Example for {@code string = "\\a"}, the returned value is - * {@code string = "\"\\\\a\""} - * - * @param string - * @return an escaped string surrounded by {@code "}. - */ - public static String getString(final String string) { - return addQuotes(escape(string)); - } - - /** - * Escapes (with {@code \}) special character occurrences in given - * {@code string}. The special characters are: - *
    - *
  • {@code \}
  • - *
  • {@code "}
  • - *
  • {@code \t}
  • - *
  • {@code \b}
  • - *
  • {@code \n}
  • - *
  • {@code \r}
  • - *
  • {@code \f}
  • - *
- * - * @param string - * @return an escaped string - */ - private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); - // don't touch single quotes here since we only construct double-quoted strings - } - - private static String addQuotes(final String string) { - return QUOTE + string + QUOTE; - } - - private static String addAngleBrackets(final String string) { - return LESS_THAN + string + MORE_THAN; - } - - public static String getFactString(final Predicate predicate, final List terms) { - return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getFactString(final Predicate predicate, final List terms, - final Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getString(final Predicate predicate, final List terms) { - return getString(predicate, terms, Function.identity()); - } - - public static String getString(final Predicate predicate, final List terms, - final Function iriTransformer) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); - stringBuilder.append(OPENING_PARENTHESIS); - - boolean first = true; - for (final Term term : terms) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getSyntacticRepresentation(iriTransformer); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); - } - - public static String getBaseString(final KnowledgeBase knowledgeBase) { - final String baseIri = knowledgeBase.getBaseIri(); - - return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? 
baseIri : getBaseDeclarationString(baseIri); - } - - private static String getBaseDeclarationString(final String baseIri) { - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getPrefixString(final Entry prefix) { - return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { - final StringBuilder sb = new StringBuilder(); - - sb.append(getBaseString(knowledgeBase)); - knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); - - return sb.toString(); - } - - public static String getCommandName(final String commandName) { - return AT + commandName; - } -} +package org.semanticweb.rulewerk.core.model.implementation; + +import java.io.IOException; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.Writer; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; + +/** + * Objects of this class are used to create string representations of syntactic + * objects. + * + * @see RuleWerk + * rule syntax + * + * @author Markus Kroetzsch + * + */ +public class Serializer { + + public static final String STATEMENT_END = " ."; + + /** + * Default IRI serializer that can be used if no abbreviations (prefixes, base, + * etc.) are used. + */ + public static final Function identityIriSerializer = new Function() { + @Override + public String apply(String iri) { + return iri.contains(":") ? 
"<" + iri + ">" : iri; + } + }; + + final Writer writer; + final Function iriTransformer; + final SerializerTermVisitor serializerTermVisitor = new SerializerTermVisitor(); + final SerializerStatementVisitor serializerStatementVisitor = new SerializerStatementVisitor(); + + /** + * Runtime exception used to report errors that occurred in visitors that do not + * declare checked exceptions. + * + * @author Markus Kroetzsch + * + */ + private class RuntimeIoException extends RuntimeException { + private static final long serialVersionUID = 1L; + final IOException cause; + + public RuntimeIoException(IOException cause) { + super(cause); + this.cause = cause; + } + + public IOException getIoException() { + return cause; + } + } + + /** + * Auxiliary class to visit {@link Term} objects for writing. + * + * @author Markus Kroetzsch + * + */ + private class SerializerTermVisitor implements TermVisitor { + + @Override + public Void visit(AbstractConstant term) { + try { + Serializer.this.writeAbstractConstant(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(DatatypeConstant term) { + try { + Serializer.this.writeDatatypeConstant(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(LanguageStringConstant term) { + try { + Serializer.this.writeLanguageStringConstant(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(UniversalVariable term) { + try { + Serializer.this.writeUniversalVariable(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(ExistentialVariable term) { + try { + Serializer.this.writeExistentialVariable(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(NamedNull term) { + try { + 
Serializer.this.writeNamedNull(term); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + } + + /** + * Auxiliary class to visit {@link Statement} objects for writing. + * + * @author Markus Kroetzsch + * + */ + private class SerializerStatementVisitor implements StatementVisitor { + + @Override + public Void visit(Fact statement) { + try { + Serializer.this.writeFact(statement); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(Rule statement) { + try { + Serializer.this.writeRule(statement); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(DataSourceDeclaration statement) { + try { + Serializer.this.writeDataSourceDeclaration(statement); + } catch (IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + } + + /** + * Construct a serializer that uses a specific function to serialize IRIs. + * + * @param writer the object used to write serializations + * @param iriTransformer a function used to abbreviate IRIs, e.g., if namespace + * prefixes were declared + */ + public Serializer(final Writer writer, final Function iriTransformer) { + this.writer = writer; + this.iriTransformer = iriTransformer; + } + + /** + * Construct a serializer that serializes IRIs without any form of + * transformation or abbreviation. + * + * @param writer the object used to write serializations + */ + public Serializer(final Writer writer) { + this(writer, identityIriSerializer); + } + + /** + * Construct a serializer that uses the given {@link PrefixDeclarationRegistry} + * to abbreviate IRIs. 
+ * + * @param writer the object used to write serializations + * @param prefixDeclarationRegistry the object used to abbreviate IRIs + */ + public Serializer(final Writer writer, PrefixDeclarationRegistry prefixDeclarationRegistry) { + this(writer, (string) -> { + return prefixDeclarationRegistry.unresolveAbsoluteIri(string, true); + }); + } + + /** + * Writes a serialization of the given {@link Statement}. + * + * @param term a {@link Statement} + * @throws IOException + */ + public void writeStatement(Statement statement) throws IOException { + try { + statement.accept(this.serializerStatementVisitor); + } catch (Serializer.RuntimeIoException e) { + throw e.getIoException(); + } + } + + /** + * Writes a serialization of the given {@link Fact}. + * + * @param fact a {@link Fact} + * @throws IOException + */ + public void writeFact(Fact fact) throws IOException { + writeLiteral(fact); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Rule}. + * + * @param rule a {@link Rule} + * @throws IOException + */ + public void writeRule(Rule rule) throws IOException { + writeLiteralConjunction(rule.getHead()); + writer.write(" :- "); + writeLiteralConjunction(rule.getBody()); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link DataSourceDeclaration}. + * + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @throws IOException + */ + public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclaration) throws IOException { + writer.write("@source "); + writePredicate(dataSourceDeclaration.getPredicate()); + writer.write(": "); + writeLiteral(dataSourceDeclaration.getDataSource().getDeclarationFact()); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Literal}. 
+ * + * @param literal a {@link Literal} + * @throws IOException + */ + public void writeLiteral(Literal literal) throws IOException { + writer.write(getIri(literal.getPredicate().getName())); + writer.write("("); + + boolean first = true; + for (final Term term : literal.getArguments()) { + if (first) { + first = false; + } else { + writer.write(", "); + } + writeTerm(term); + } + + writer.write(")"); + } + + /** + * Writes a serialization of the given {@link Conjunction} of {@link Literal} + * objects. + * + * @param literals a {@link Conjunction} + * @throws IOException + */ + public void writeLiteralConjunction(final Conjunction literals) throws IOException { + boolean first = true; + for (final Literal literal : literals.getLiterals()) { + if (first) { + first = false; + } else { + writer.write(", "); + } + writeLiteral(literal); + } + } + + /** + * Writes a serialization of the given {@link Predicate}. This serialization + * specifies the name and arity of the predicate. + * + * @param predicate a {@link Predicate} + * @throws IOException + */ + public void writePredicate(Predicate predicate) throws IOException { + writer.write(getIri(predicate.getName())); + writer.write("["); + writer.write(String.valueOf(predicate.getArity())); + writer.write("]"); + } + + /** + * Writes a serialization of the given {@link Term}. + * + * @param term a {@link Term} + * @throws IOException + */ + public void writeTerm(Term term) throws IOException { + try { + term.accept(this.serializerTermVisitor); + } catch (Serializer.RuntimeIoException e) { + throw e.getIoException(); + } + } + + /** + * Writes a serialization of the given {@link AbstractConstant}. + * + * @param abstractConstant a {@link AbstractConstant} + * @throws IOException + */ + public void writeAbstractConstant(AbstractConstant abstractConstant) throws IOException { + writer.write(getIri(abstractConstant.getName())); + } + + /** + * Writes a serialization of the given {@link DatatypeConstant}. 
+ * + * @param datatypeConstant a {@link DatatypeConstant} + * @throws IOException + */ + public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOException { + if (PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) { + writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + } else if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype())) { + writer.write(datatypeConstant.getLexicalValue()); + } else { + writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + writer.write("^^"); + writer.write(getIri(datatypeConstant.getDatatype())); + } + } + + /** + * Writes a serialization of the given {@link UniversalVariable}. + * + * @param universalVariable a {@link UniversalVariable} + * @throws IOException + */ + public void writeUniversalVariable(UniversalVariable universalVariable) throws IOException { + writer.write("?"); + writer.write(universalVariable.getName()); + } + + /** + * Writes a serialization of the given {@link ExistentialVariable}. + * + * @param existentialVariable a {@link ExistentialVariable} + * @throws IOException + */ + public void writeExistentialVariable(ExistentialVariable existentialVariable) throws IOException { + writer.write("!"); + writer.write(existentialVariable.getName()); + } + + /** + * Writes a serialization of the given {@link NamedNull}. + * + * @param namedNull a {@link NamedNull} + * @throws IOException + */ + public void writeNamedNull(NamedNull namedNull) throws IOException { + writer.write("_:"); + writer.write(namedNull.getName()); + } + + /** + * Writes a serialization of the given {@link LanguageStringConstant}. 
+ * + * @param languageStringConstant a {@link LanguageStringConstant} + * @throws IOException + */ + public void writeLanguageStringConstant(LanguageStringConstant languageStringConstant) throws IOException { + writer.write(getQuotedString(languageStringConstant.getString())); + writer.write("@"); + writer.write(languageStringConstant.getLanguageTag()); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
    + *
  • {@code \}
  • + *
  • {@code "}
  • + *
  • {@code \t}
  • + *
  • {@code \b}
  • + *
  • {@code \n}
  • + *
  • {@code \r}
  • + *
  • {@code \f}
  • + *
+ * + * @param string + * @return an escaped string + */ + private String getQuotedString(final String string) { + return "\"" + string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f") + "\""; + } + + private String getIri(final String string) { + return iriTransformer.apply(string); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index b9f1d3ee9..e81407f7b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -53,7 +53,7 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -577,7 +577,7 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE * declared prefixes, or {@code iri} if no suitable prefix is declared. 
*/ public String unresolveAbsoluteIri(String iri) { - return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); + return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri, false); } /** @@ -589,20 +589,20 @@ public String unresolveAbsoluteIri(String iri) { * stream */ public void writeKnowledgeBase(Writer writer) throws IOException { - writer.write(Serializer.getBaseAndPrefixDeclarations(this)); + writer.write(OldSerializer.getBaseAndPrefixDeclarations(this)); for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { - writer.write(Serializer.getString(dataSource)); + writer.write(OldSerializer.getString(dataSource)); writer.write('\n'); } for (Fact fact : this.getFacts()) { - writer.write(Serializer.getFactString(fact)); + writer.write(OldSerializer.getFactString(fact)); writer.write('\n'); } for (Rule rule : this.getRules()) { - writer.write(Serializer.getString(rule)); + writer.write(OldSerializer.getString(rule)); writer.write('\n'); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 276398c62..ff9a2d8a0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -42,7 +42,7 @@ import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * Interface that exposes the (existential) rule reasoning capabilities of a @@ -133,9 +133,9 @@ default Correctness unsafeForEachInference(BiConsumer> act */ default Correctness writeInferences(OutputStream stream) throws IOException { final KnowledgeBase knowledgeBase = 
getKnowledgeBase(); - stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); + stream.write(OldSerializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); return forEachInference((predicate, termList) -> stream - .write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); + .write(OldSerializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 7f72f25de..c3347b47a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -23,7 +23,8 @@ import java.io.IOException; import java.util.Arrays; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of @@ -74,11 +75,16 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } + + @Override + String getDeclarationPredicateName() { + return "load-csv"; + } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 46ec295de..68cbf2f68 100644 --- 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -27,6 +27,10 @@ import java.util.stream.StreamSupport; import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** * A {@code FileDataSource} is an abstract implementation of a storage for fact @@ -113,4 +117,19 @@ public boolean equals(final Object obj) { return this.file.equals(other.getFile()); } + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate(getDeclarationPredicateName(), 1); + return Expressions.makeFact(predicate, + Expressions.makeDatatypeConstant(getPath(), PrefixDeclarationRegistry.XSD_STRING)); + } + + /** + * Returns the name of the predicate that is used to define a declaration of + * this data source. 
+ * + * @return + */ + abstract String getDeclarationPredicateName(); + } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 57231434a..f7f933253 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -73,7 +73,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return "QueryResult [terms=" + this.terms + "]"; + return this.terms.toString(); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 9df6c5d26..3dc4c9d2b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -24,7 +24,7 @@ import java.util.Arrays; import java.util.Optional; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside @@ -73,7 +73,7 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override @@ -85,4 +85,9 @@ public Optional getRequiredArity() { public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); } + + @Override + String getDeclarationPredicateName() { + return "load-rdf"; + } } diff --git 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 99f8548e0..b89e22068 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -26,8 +26,13 @@ import java.util.Optional; import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a @@ -38,8 +43,6 @@ */ public class SparqlQueryResultDataSource implements ReasonerDataSource { - - private final URL endpoint; private final String queryVariables; private final String queryBody; @@ -82,8 +85,8 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl */ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc - public SparqlQueryResultDataSource(final URL endpoint, - final LinkedHashSet queryVariables, final String queryBody) { + public SparqlQueryResultDataSource(final URL endpoint, final LinkedHashSet queryVariables, + final String queryBody) { Validate.notNull(endpoint, "Endpoint cannot be null."); Validate.notNull(queryVariables, "Query variables ordered set cannot be null."); Validate.noNullElements(queryVariables, "Query 
variables cannot be null or contain null elements."); @@ -159,7 +162,7 @@ public String toString() { @Override public String getSyntacticRepresentation() { - return Serializer.getString(this); + return OldSerializer.getString(this); } @Override @@ -167,4 +170,14 @@ public void accept(DataSourceConfigurationVisitor visitor) { visitor.visit(this); } + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate("sparql", 3); + Term endpointTerm = Expressions.makeAbstractConstant(getEndpoint().toString()); + Term variablesTerm = Expressions.makeDatatypeConstant(getQueryVariables(), + PrefixDeclarationRegistry.XSD_STRING); + Term patternTerm = Expressions.makeDatatypeConstant(getQueryBody(), PrefixDeclarationRegistry.XSD_STRING); + return Expressions.makeFact(predicate, endpointTerm, variablesTerm, patternTerm); + } + } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index dfd658443..3c415a190 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -33,7 +33,7 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -89,7 +89,7 @@ public void toString_CsvFileDataSource() throws 
IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + fileName); + final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @@ -114,7 +114,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + fileName); + final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 9bf049824..6705ba25d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -91,7 +91,7 @@ public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws Pref String prefix = "eg:"; prefixDeclarations.setPrefixIri(prefix, BASE); String resolved = BASE + RELATIVE; - String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); + String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved, false); assertEquals(resolved, prefixDeclarations.resolvePrefixedName(unresolved)); } @@ -133,49 +133,49 @@ public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPref @Test public void unresolveAbsoluteIri_default_identical() { - 
assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE, false)); } @Test public void unresolveAbsoluteIri_declaredPrefix_succeeds() { - assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); } @Test public void unresolveAbsoluteIri_unrelatedPrefix_identical() { prefixDeclarations.setPrefixIri("eg:", UNRELATED); - assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); } @Test public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { prefixDeclarations.setPrefixIri("ex:", UNRELATED); prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); } @Test public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("ex:", MORE_SPECIFIC); - assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); + assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); prefixDeclarations.setPrefixIri("er:", EVEN_MORE_SPECIFIC); - assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); + assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); } @Test public void 
unresolveAbsoluteIri_exactPrefixMatch_identical() { prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE, false)); } @Test public void unresolveAbsoluteIri_baseIsMoreSpecific_baseWins() { prefixDeclarations.setBaseIri(MORE_SPECIFIC); prefixDeclarations.setPrefixIri("eg:", BASE); - assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); } @Test @@ -184,7 +184,7 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref prefixDeclarations.setPrefixIri(prefix, BASE); String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); - assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); + assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved, false)); } @Test @@ -194,7 +194,7 @@ public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative( PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(BASE); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); - assertEquals(relativeIri, this.prefixDeclarations.unresolveAbsoluteIri(relativeIri)); + assertEquals(relativeIri, this.prefixDeclarations.unresolveAbsoluteIri(relativeIri, false)); } @Test @@ -205,7 +205,7 @@ public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() prefixDeclarations.setBaseIri(BASE); String absoluteIri = prefixDeclarations.absolutizeIri(RELATIVE); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); - String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri); + String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri, false); assertNotEquals(RELATIVE, 
resolvedIri); assertEquals("rw_gen0:" + RELATIVE, resolvedIri); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java new file mode 100644 index 000000000..fbcde3f04 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -0,0 +1,214 @@ +package org.semanticweb.rulewerk.core.model; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; +import java.net.URL; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class SerializerTest { + + static Term abstractConstant = Expressions.makeAbstractConstant("http://example.org/test"); + static Term abstractConstantShort = 
Expressions.makeAbstractConstant("c"); + static Term existentialVariable = Expressions.makeExistentialVariable("X"); + static Term universalVariable = Expressions.makeUniversalVariable("X"); + static Term languageStringConstant = Expressions.makeLanguageStringConstant("abc", "de"); + static Term datatypeConstantGeneral = Expressions.makeDatatypeConstant("abc", "http://example.org/test"); + static Term datatypeConstantString = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); + static Term datatypeConstantInteger = Expressions.makeDatatypeConstant("123", + PrefixDeclarationRegistry.XSD_INTEGER); + static Term namedNull = new NamedNullImpl("n1"); + + static Predicate p1 = Expressions.makePredicate("p1", 1); + static Predicate p2 = Expressions.makePredicate("p2", 2); + static Predicate p3 = Expressions.makePredicate("p3", 3); + + static Fact fact = Expressions.makeFact(p1, abstractConstantShort); + static PositiveLiteral l1 = Expressions.makePositiveLiteral(p1, universalVariable); + static Literal l2 = Expressions.makePositiveLiteral(p2, universalVariable, abstractConstantShort); + static Rule rule = Expressions.makeRule(l1, l2, fact); + + StringWriter writer; + Serializer serializer; + + @Before + public void init() { + writer = new StringWriter(); + serializer = new Serializer(writer); + } + + private Serializer getThrowingSerializer() throws IOException { + Writer writerMock = Mockito.mock(Writer.class); + Mockito.doThrow(IOException.class).when(writerMock).write(Mockito.anyString()); + return new Serializer(writerMock); + } + + @Test + public void serializeDatatypeConstant() throws IOException { + serializer.writeTerm(datatypeConstantGeneral); + assertEquals("\"abc\"^^", writer.toString()); + } + + @Test + public void serializeDatatypeConstantString() throws IOException { + serializer.writeTerm(datatypeConstantString); + assertEquals("\"abc\"", writer.toString()); + } + + @Test + public void serializeDatatypeConstantInteger() throws 
IOException { + serializer.writeTerm(datatypeConstantInteger); + assertEquals("123", writer.toString()); + } + + @Test + public void serializeExistentialVariable() throws IOException { + serializer.writeTerm(existentialVariable); + assertEquals("!X", writer.toString()); + } + + @Test + public void serializeUniversalVariable() throws IOException { + serializer.writeTerm(universalVariable); + assertEquals("?X", writer.toString()); + } + + @Test + public void serializeLanguageStringConstant() throws IOException { + serializer.writeTerm(languageStringConstant); + assertEquals("\"abc\"@de", writer.toString()); + } + + @Test + public void serializeNamedNull() throws IOException { + serializer.writeTerm(namedNull); + assertEquals("_:n1", writer.toString()); + } + + @Test + public void serializeFact() throws IOException { + serializer.writeStatement(fact); + assertEquals("p1(c) .", writer.toString()); + } + + @Test + public void serializeRule() throws IOException { + serializer.writeStatement(rule); + assertEquals("p1(?X) :- p2(?X, c), p1(c) .", writer.toString()); + } + + @Test + public void serializeCsvDataSourceDeclaration() throws IOException { + DataSourceDeclaration csvSourceDecl = new DataSourceDeclarationImpl(p1, new CsvFileDataSource("test.csv")); + serializer.writeStatement(csvSourceDecl); + assertEquals("@source p1[1]: load-csv(\"test.csv\") .", writer.toString()); + } + + @Test + public void serializeRdfDataSourceDeclaration() throws IOException { + DataSourceDeclaration rdfSourceDecl = new DataSourceDeclarationImpl(p3, new RdfFileDataSource("test.nt")); + serializer.writeStatement(rdfSourceDecl); + assertEquals("@source p3[3]: load-rdf(\"test.nt\") .", writer.toString()); + } + + @Test + public void serializeSparqlDataSourceDeclaration() throws IOException { + DataSourceDeclaration sparqlSourceDecl = new DataSourceDeclarationImpl(p1, + new SparqlQueryResultDataSource(new URL("http://example.org"), "var", "?var ")); + 
serializer.writeStatement(sparqlSourceDecl); + assertEquals("@source p1[1]: sparql(, \"var\", \"?var \") .", writer.toString()); + } + + @Test + public void serializeAbstractConstantWithPrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setPrefixIri("eg:", "http://example.org/"); + Serializer prefSerializer = new Serializer(writer, prefixes); + + prefSerializer.writeTerm(abstractConstant); + assertEquals("eg:test", writer.toString()); + } + + @Test + public void serializeDatatypeConstantWithPrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setPrefixIri("eg:", "http://example.org/"); + Serializer prefSerializer = new Serializer(writer, prefixes); + + prefSerializer.writeTerm(datatypeConstantGeneral); + assertEquals("\"abc\"^^eg:test", writer.toString()); + } + + @Test + public void createThrowingSerializer_succeeds() throws IOException { + getThrowingSerializer(); + } + + @Test(expected = IOException.class) + public void serializeAbstractConstant_fails() throws IOException { + getThrowingSerializer().writeTerm(abstractConstant); + } + + @Test(expected = IOException.class) + public void serializeDatatypeConstant_fails() throws IOException { + getThrowingSerializer().writeTerm(datatypeConstantGeneral); + } + + @Test(expected = IOException.class) + public void serializeExistentialVariable_fails() throws IOException { + getThrowingSerializer().writeTerm(existentialVariable); + } + + @Test(expected = IOException.class) + public void serializeUniversalVariable_fails() throws IOException { + getThrowingSerializer().writeTerm(universalVariable); + } + + @Test(expected = IOException.class) + public void serializeLanguageStringConstant_fails() throws IOException { + getThrowingSerializer().writeTerm(languageStringConstant); + } + + @Test(expected = IOException.class) + public void 
serializeNamedNull_fails() throws IOException { + getThrowingSerializer().writeTerm(namedNull); + } + + @Test(expected = IOException.class) + public void serializeFact_fails() throws IOException { + getThrowingSerializer().writeStatement(fact); + } + + @Test(expected = IOException.class) + public void serializeRule_fails() throws IOException { + getThrowingSerializer().writeStatement(rule); + } + + @Test(expected = IOException.class) + public void serializeDataSourceDeclaration_fails() throws IOException { + DataSourceDeclaration csvSourceDecl = new DataSourceDeclarationImpl(p1, new CsvFileDataSource("test.csv")); + getThrowingSerializer().writeStatement(csvSourceDecl); + } + +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index cf45c534a..ca4561dac 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; @@ -43,9 +43,9 @@ public DefaultParserConfiguration() { * Register built-in data sources (currently CSV, RDF, SPARQL). 
*/ private void registerDefaultDataSources() { - registerDataSource(Serializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); - registerDataSource(Serializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); - registerDataSource(Serializer.SPARQL_QUERY_RESULT_DATA_SOURCE, + registerDataSource(OldSerializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); + registerDataSource(OldSerializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); + registerDataSource(OldSerializer.SPARQL_QUERY_RESULT_DATA_SOURCE, new SparqlQueryResultDataSourceDeclarationHandler()); } From 3f0e349dab984124f9b76e29001f2b78669d5e6e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:13:44 +0200 Subject: [PATCH 0948/1255] Update renaming of old serializer --- .../reasoner/vlog/VLogToModelConverter.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index f313fdefd..3b7876dda 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -30,7 +30,7 @@ import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** @@ -103,19 +103,19 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { */ static Constant toConstant(String vLogConstantName) { final 
Constant constant; - if (vLogConstantName.charAt(0) == Serializer.LESS_THAN - && vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { + if (vLogConstantName.charAt(0) == OldSerializer.LESS_THAN + && vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { // strip <> off of IRIs constant = new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); - } else if (vLogConstantName.charAt(0) == Serializer.QUOTE) { - if (vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { - final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.LESS_THAN, + } else if (vLogConstantName.charAt(0) == OldSerializer.QUOTE) { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { + final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.LESS_THAN, vLogConstantName.length() - 2); final String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); final String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); constant = new DatatypeConstantImpl(lexicalValue, datatype); } else { - final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.AT, vLogConstantName.length() - 2); + final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.AT, vLogConstantName.length() - 2); if (startTypeIdx > -1) { final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); final String string = vLogConstantName.substring(1, startTypeIdx - 1); From cecd42828267cdf2a715abcb71b13785783d1390 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:13:51 +0200 Subject: [PATCH 0949/1255] license header --- .../rulewerk/core/model/SerializerTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index fbcde3f04..d8a4fef62 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.model; +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.*; import java.io.IOException; From ce3bdee5f9de749b88173a7689f9f9cf60b64377 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:50:27 +0200 Subject: [PATCH 0950/1255] serialize prefixes correctly --- .../AbstractPrefixDeclarationRegistry.java | 18 +++++++++-- .../core/model/implementation/Serializer.java | 31 +++++++++++++++++++ .../rulewerk/core/reasoner/KnowledgeBase.java | 14 +++++---- .../rulewerk/core/model/SerializerTest.java | 13 +++++++- .../core/reasoner/KnowledgeBaseTest.java | 10 ++++-- 5 files changed, 74 insertions(+), 12 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index d1565ea81..31124e5d5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -28,6 +28,7 @@ import java.util.Map.Entry; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** @@ -98,10 +99,23 @@ public String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarat @Override public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { - String shortestIri = addIriBrackets ? 
"<" + iri + ">" : iri; + String shortestIri; + if (addIriBrackets) { + if (!iri.contains(":")) { + shortestIri = iri; + if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { + throw new RulewerkRuntimeException("Relative IRIs cannot be serialized when a base is declared."); + } + } else { + shortestIri = "<" + iri + ">"; + } + } else { + shortestIri = iri; + } + String baseIri = getBaseIri(); - if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.length() > baseIri.length() + if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri) && iri.length() > baseIri.length() && iri.startsWith(baseIri)) { String shorterIri = iri.substring(baseIri.length()); // Only allow very simple names of this form, to avoid confusion, e.g., with diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 23d4c238c..1cdd5d3df 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -23,6 +23,8 @@ */ import java.io.Writer; +import java.util.Iterator; +import java.util.Map.Entry; import java.util.function.Function; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; @@ -423,6 +425,35 @@ public void writeNamedNull(NamedNull namedNull) throws IOException { writer.write(namedNull.getName()); } + /** + * Writes a serialization of the given {@link PrefixDeclarationRegistry}. 
+ * + * @param prefixDeclarationRegistry a {@link PrefixDeclarationRegistry} + * @throws IOException + */ + public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) throws IOException { + final String baseIri = prefixDeclarationRegistry.getBaseIri(); + if (!PrefixDeclarationRegistry.EMPTY_BASE.contentEquals(baseIri)) { + writer.write("@base <"); + writer.write(baseIri); + writer.write(">"); + writer.write(STATEMENT_END); + writer.write("\n"); + } + + Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); + while (prefixIterator.hasNext()) { + Entry entry = prefixIterator.next(); + writer.write("@prefix "); + writer.write(entry.getKey()); + writer.write(" <"); + writer.write(entry.getValue()); + writer.write(">"); + writer.write(STATEMENT_END); + writer.write("\n"); + } + } + /** * Writes a serialization of the given {@link LanguageStringConstant}. * diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index e81407f7b..698683e22 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -53,7 +53,7 @@ import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -589,20 +589,22 @@ public String unresolveAbsoluteIri(String iri) { * stream */ public void writeKnowledgeBase(Writer writer) throws IOException { - writer.write(OldSerializer.getBaseAndPrefixDeclarations(this)); + 
Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); - for (DataSourceDeclaration dataSource : this.getDataSourceDeclarations()) { - writer.write(OldSerializer.getString(dataSource)); + serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); + + for (DataSourceDeclaration dataSourceDeclaration : this.getDataSourceDeclarations()) { + serializer.writeDataSourceDeclaration(dataSourceDeclaration); writer.write('\n'); } for (Fact fact : this.getFacts()) { - writer.write(OldSerializer.getFactString(fact)); + serializer.writeFact(fact); writer.write('\n'); } for (Rule rule : this.getRules()) { - writer.write(OldSerializer.getString(rule)); + serializer.writeRule(rule); writer.write('\n'); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index d8a4fef62..6a41d37fc 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -180,11 +180,22 @@ public void serializeDatatypeConstantWithPrefixDeclarations() throws IOException assertEquals("\"abc\"^^eg:test", writer.toString()); } + @Test + public void serializePrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setBaseIri("http://example.org/base"); + prefixes.setPrefixIri("eg:", "http://example.org/"); + Serializer prefSerializer = new Serializer(writer, prefixes); + + prefSerializer.writePrefixDeclarationRegistry(prefixes); + assertEquals("@base .\n@prefix eg: .\n", writer.toString()); + } + @Test public void createThrowingSerializer_succeeds() throws IOException { getThrowingSerializer(); } - + @Test(expected = IOException.class) public void serializeAbstractConstant_fails() throws IOException { 
getThrowingSerializer().writeTerm(abstractConstant); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 91a350b65..ea3254874 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -31,6 +31,7 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Rule; @@ -137,15 +138,18 @@ public void writeKnowledgeBase_justFacts_succeeds() throws IOException { assertEquals("P(c) .\nP(d) .\nQ(c) .\n", writer.toString()); } - @Test - public void writeKnowledgeBase_withBase_succeeds() throws IOException { + @Test(expected = RulewerkRuntimeException.class) + public void writeKnowledgeBase_withBase_fails() throws IOException { String baseIri = "https://example.org/"; MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setBaseIri(baseIri); this.kb.mergePrefixDeclarations(prefixDeclarations); StringWriter writer = new StringWriter(); this.kb.writeKnowledgeBase(writer); - assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", writer.toString()); + //// This would be incorrect, since parsing this would lead to another KB + //// that uses IRIs like : + // assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n", + // writer.toString()); } @Test From 54a4b931cbc473afd9599b32f74bf000eeaf7b54 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sat, 22 Aug 2020 23:58:50 +0200 Subject: [PATCH 
0951/1255] implement missing method --- .../rulewerk/reasoner/vlog/VLogInMemoryDataSource.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java index c6952d09f..4952cfb84 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -23,6 +23,7 @@ import java.util.Arrays; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; /** @@ -88,4 +89,9 @@ public String getSyntacticRepresentation() { public void accept(DataSourceConfigurationVisitor visitor) { visitor.visit(this); } + + @Override + public Fact getDeclarationFact() { + throw new UnsupportedOperationException("VLogInMemoryDataSource is cannot be serialized."); + } } From 0c776df0267001cf0882c4228186e536ec8eae80 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 00:16:11 +0200 Subject: [PATCH 0952/1255] support serialization to string --- .../core/model/implementation/Serializer.java | 27 +++++++++++++++++++ .../rulewerk/core/model/SerializerTest.java | 1 + 2 files changed, 28 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 1cdd5d3df..a624590be 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,6 +1,7 @@ package 
org.semanticweb.rulewerk.core.model.implementation; import java.io.IOException; +import java.io.StringWriter; /*- * #%L @@ -71,6 +72,14 @@ public String apply(String iri) { } }; + /** + * Interface for a method that writes something to a writer. + */ + @FunctionalInterface + public interface SerializationWriter { + void write(final Serializer serializer) throws IOException; + } + final Writer writer; final Function iriTransformer; final SerializerTermVisitor serializerTermVisitor = new SerializerTermVisitor(); @@ -466,6 +475,24 @@ public void writeLanguageStringConstant(LanguageStringConstant languageStringCon writer.write(languageStringConstant.getLanguageTag()); } + /** + * Convenience method for obtaining serializations as Java strings. + * + * @param writeAction a function that accepts a {@link Serializer} and produces + * a string + * @return serialization string + */ + public static String getSerialization(SerializationWriter writeAction) { + final StringWriter stringWriter = new StringWriter(); + final Serializer serializer = new Serializer(stringWriter); + try { + writeAction.write(serializer); + } catch (IOException e) { + throw new RuntimeException("StringWriter should never throw an IOException."); + } + return stringWriter.toString(); + } + /** * Escapes (with {@code \}) special character occurrences in given * {@code string}. 
The special characters are: diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 6a41d37fc..871fec96f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -130,6 +130,7 @@ public void serializeNamedNull() throws IOException { public void serializeFact() throws IOException { serializer.writeStatement(fact); assertEquals("p1(c) .", writer.toString()); + assertEquals("p1(c) .", Serializer.getSerialization(serializer -> serializer.writeFact(fact))); } @Test From 19b00b856c291cab68a72a7f0946379144cfba02 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:37:59 +0200 Subject: [PATCH 0953/1255] use new Serializer --- .../core/model/api/AbstractConstant.java | 13 - .../rulewerk/core/model/api/Command.java | 3 +- .../rulewerk/core/model/api/Conjunction.java | 7 - .../core/model/api/DataSourceDeclaration.java | 6 - .../core/model/api/DatatypeConstant.java | 13 - .../rulewerk/core/model/api/Entity.java | 6 - .../core/model/api/ExistentialVariable.java | 6 - .../rulewerk/core/model/api/Fact.java | 7 - .../model/api/LanguageStringConstant.java | 13 - .../rulewerk/core/model/api/Literal.java | 7 - .../rulewerk/core/model/api/NamedNull.java | 13 - .../rulewerk/core/model/api/Predicate.java | 7 - .../rulewerk/core/model/api/Rule.java | 7 - .../rulewerk/core/model/api/Term.java | 11 - .../core/model/api/UniversalVariable.java | 6 - .../rulewerk/core/model/api/Variable.java | 6 - .../implementation/AbstractConstantImpl.java | 2 +- .../implementation/AbstractLiteralImpl.java | 2 +- .../AbstractPrefixDeclarationRegistry.java | 11 +- .../model/implementation/ConjunctionImpl.java | 2 +- .../DataSourceDeclarationImpl.java | 2 +- .../implementation/DatatypeConstantImpl.java | 4 +- 
.../ExistentialVariableImpl.java | 2 +- .../core/model/implementation/FactImpl.java | 2 +- .../LanguageStringConstantImpl.java | 4 +- .../model/implementation/NamedNullImpl.java | 2 +- .../model/implementation/OldSerializer.java | 560 ++++++++++++++++++ .../model/implementation/PredicateImpl.java | 2 +- .../core/model/implementation/RuleImpl.java | 2 +- .../core/model/implementation/Serializer.java | 26 +- .../implementation/UniversalVariableImpl.java | 2 +- .../rulewerk/core/reasoner/Reasoner.java | 33 +- .../implementation/CsvFileDataSource.java | 15 +- .../implementation/RdfFileDataSource.java | 14 +- .../SparqlQueryResultDataSource.java | 13 +- .../core/model/DataSourceDeclarationTest.java | 5 +- .../rulewerk/core/model/SerializerTest.java | 19 + .../rulewerk/core/model/TermImplTest.java | 6 +- .../parser/DefaultParserConfiguration.java | 11 +- .../rulewerk/parser/EntityTest.java | 20 +- .../rulewerk/parser/RuleParserTest.java | 6 +- .../reasoner/vlog/VLogInMemoryDataSource.java | 2 +- .../reasoner/vlog/VLogToModelConverter.java | 31 +- .../vlog/VLogReasonerWriteInferencesTest.java | 22 +- 44 files changed, 713 insertions(+), 240 deletions(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 70592a3e6..5d43e7cf3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for abstract constants, i.e. 
for constants that represent an * abstract domain element (in contrast to a specific value of a concrete @@ -38,13 +34,4 @@ default TermType getType() { return TermType.ABSTRACT_CONSTANT; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return OldSerializer.getString(this, iriTransformer); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index fce5aa32a..1a5dc295b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -64,7 +64,6 @@ public List getArguments() { return arguments; } - @Override public String getSyntacticRepresentation() { StringBuilder result = new StringBuilder("@"); result.append(name); @@ -75,7 +74,7 @@ public String getSyntacticRepresentation() { result.append(OldSerializer.getString(rule.getHead())).append(OldSerializer.RULE_SEPARATOR) .append(OldSerializer.getString(rule.getBody())); } else if (argument.fromPositiveLiteral().isPresent()) { - result.append(argument.fromPositiveLiteral().get().getSyntacticRepresentation()); + result.append(argument.fromPositiveLiteral().get().toString()); } else { throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 2b7511580..60b53ea1a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -22,8 +22,6 @@ import java.util.List; -import 
org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of * (negated or positive) atomic formulas that are connected with logical AND. @@ -41,9 +39,4 @@ public interface Conjunction extends Iterable, SyntaxObjec */ List getLiterals(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index c609e1a03..14fb54412 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /*- * #%L * Rulewerk Core Components @@ -45,8 +43,4 @@ public interface DataSourceDeclaration extends Statement, Entity { */ DataSource getDataSource(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index 8f4f7c414..0de18e509 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for datatype constants, i.e. for constants that represent a * specific value of a concrete datatype). 
Such terms are of type @@ -62,13 +58,4 @@ default TermType getType() { */ String getLexicalValue(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return OldSerializer.getString(this, iriTransformer); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index 431b90299..c4bfd7a16 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -27,11 +27,5 @@ * */ public interface Entity { - /** - * Returns the parsable string representation of an Entity. - * - * @return non-empty String - */ - String getSyntacticRepresentation(); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 35f08ab39..1b47a54c7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -20,8 +20,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for existentially quantified variables, i.e., variables that appear * in the scope of an (implicit) existential quantifier in a rule. 
@@ -35,8 +33,4 @@ default TermType getType() { return TermType.EXISTENTIAL_VARIABLE; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index 5b1fc9df2..d3b938897 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /*- * #%L * Rulewerk Core Components @@ -31,9 +29,4 @@ */ public interface Fact extends PositiveLiteral, Statement { - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getFactString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index 2d7d082d5..e19bf3517 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for string constants with a language tag, used to represent values * of type http://www.w3.org/1999/02/22-rdf-syntax-ns#langString in RDF, OWL, @@ -65,13 +61,4 @@ default String getDatatype() { */ String getLanguageTag(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getConstantName(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return getSyntacticRepresentation(); - } 
} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index d40d3a04a..896dc1f6f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -22,8 +22,6 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for literals. A positive literal is simply an atomic formula, i.e., * a formula of the form P(t1,...,tn) where P is a {@link Predicate} of arity n @@ -52,9 +50,4 @@ public interface Literal extends SyntaxObject, Entity { */ List getArguments(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index b78a6be6d..c4f6354cb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -20,10 +20,6 @@ * #L% */ -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for {@link TermType#NAMED_NULL} terms. 
A blank is an entity used to * represent anonymous domain elements introduced during the reasoning process @@ -38,13 +34,4 @@ default TermType getType() { return TermType.NAMED_NULL; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index 5ff3a4daa..7ed7d48f8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -20,8 +20,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * A Predicate represents a relation between terms. Is uniquely identified by * its name and arity. The arity determines the number of terms allowed in the @@ -48,9 +46,4 @@ public interface Predicate extends Entity { * @return the arity of the Predicate. 
*/ int getArity(); - - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 6a49dd596..6b4e0ea79 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /*- * #%L * Rulewerk Core Components @@ -48,9 +46,4 @@ public interface Rule extends SyntaxObject, Statement, Entity { */ Conjunction getBody(); - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index c8aabae47..3bbabcfe4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -20,8 +20,6 @@ * #L% */ -import java.util.function.Function; - /** * Interface for terms. A term is characterized by a string name and a * {@link TermType}. @@ -73,13 +71,4 @@ default boolean isVariable() { */ T accept(TermVisitor termVisitor); - /** - * Return the parseable string representation of this Term, transforming IRIs. - * - * @param iriTransformer a function that is applied to transform any IRIs - * occurring in the output. - * - * @return non-empty String containing the representation. 
- */ - String getSyntacticRepresentation(Function iriTransformer); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java index df279681e..74e95c6ae 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java @@ -20,8 +20,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * Interface for universally quantified variables, i.e., variables that appear * in the scope of an (implicit) universal quantifier in a rule. @@ -35,8 +33,4 @@ default TermType getType() { return TermType.UNIVERSAL_VARIABLE; } - @Override - default String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java index ba0785752..c68d289c2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java @@ -20,8 +20,6 @@ * #L% */ -import java.util.function.Function; - /** * Interface for variables, i.e., terms of type * {@link TermType#UNIVERSAL_VARIABLE} and @@ -32,8 +30,4 @@ * @author Markus Krötzsch */ public interface Variable extends Term { - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java index 426c993c6..508360739 100644 --- 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java @@ -47,6 +47,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeAbstractConstant(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index 4b3669226..4ee1b98e0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -93,7 +93,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeLiteral(this)); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 31124e5d5..14673acdc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -37,6 +37,12 @@ * @author Maximilian Marx */ public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclarationRegistry { + + /** + * Pattern for strings that are permissible as local names in abbreviated forms. 
+ */ + static public final String REGEXP_LOCNAME = "^[a-zA-Z]([/a-zA-Z0-9_-])*$"; + /** * Map associating each prefixName with the full prefixIri. */ @@ -101,7 +107,7 @@ public String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarat public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { String shortestIri; if (addIriBrackets) { - if (!iri.contains(":")) { + if (!iri.contains(":") && iri.matches(REGEXP_LOCNAME)) { shortestIri = iri; if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { throw new RulewerkRuntimeException("Relative IRIs cannot be serialized when a base is declared."); @@ -120,8 +126,7 @@ public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { String shorterIri = iri.substring(baseIri.length()); // Only allow very simple names of this form, to avoid confusion, e.g., with // numbers or boolean literals: - if (shorterIri.matches("^[a-zA-Z]([/a-zA-Z0-9_-])*$") && !"true".equals(shorterIri) - || !"false".equals(shorterIri)) { + if (shorterIri.matches(REGEXP_LOCNAME) && !"true".equals(shorterIri) || !"false".equals(shorterIri)) { shortestIri = shorterIri; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java index 24a750694..6da458d75 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java @@ -87,7 +87,7 @@ public Iterator iterator() { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeLiteralConjunction(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java index db80f1549..c031d5af4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java @@ -85,7 +85,7 @@ public T accept(StatementVisitor statementVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeDataSourceDeclaration(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index f58e13002..8696190c7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -60,12 +60,12 @@ public String getLexicalValue() { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); } @Override public String getName() { - return OldSerializer.getConstantName(this); + return toString(); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java index 2f7f41e5f..3a130ee21 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { 
@Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeExistentialVariable(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java index fad53f4bb..aa984d178 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java @@ -51,7 +51,7 @@ public T accept(final StatementVisitor statementVisitor) { @Override public String toString() { - return this.getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeFact(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 8a7ae5ca1..e7ab0f8dd 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -44,7 +44,7 @@ public LanguageStringConstantImpl(String string, String languageTag) { @Override public String getName() { - return OldSerializer.getConstantName(this); + return toString(); } @Override @@ -84,7 +84,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeLanguageStringConstant(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java index 803629460..0dae674db 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java @@ -48,6 +48,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeNamedNull(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java new file mode 100644 index 000000000..44cb41273 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java @@ -0,0 +1,560 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.List; +import java.util.Map.Entry; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +/** + * A utility class with static methods to obtain the correct parsable string + * representation of the different data models. 
+ * + * @author Ali Elhalawati + * + */ +@Deprecated +public final class OldSerializer { + private static final String NEW_LINE = "\n"; + public static final String STATEMENT_SEPARATOR = " ."; + public static final String COMMA = ", "; + public static final String NEGATIVE_IDENTIFIER = "~"; + public static final String EXISTENTIAL_IDENTIFIER = "!"; + public static final String UNIVERSAL_IDENTIFIER = "?"; + public static final String NAMEDNULL_IDENTIFIER = "_:"; + public static final String OPENING_PARENTHESIS = "("; + public static final String CLOSING_PARENTHESIS = ")"; + public static final String OPENING_BRACKET = "["; + public static final String CLOSING_BRACKET = "]"; + public static final String RULE_SEPARATOR = " :- "; + public static final char AT = '@'; + public static final String DATA_SOURCE = "@source "; + public static final String BASE = "@base "; + public static final String PREFIX = "@prefix "; + public static final String CSV_FILE_DATA_SOURCE = "load-csv"; + public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; + public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; + public static final String DATA_SOURCE_SEPARATOR = ": "; + public static final String COLON = ":"; + public static final String DOUBLE_CARET = "^^"; + public static final char LESS_THAN = '<'; + public static final char MORE_THAN = '>'; + public static final char QUOTE = '"'; + + public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; + public static final String REGEX_INTEGER = "^[-+]?\\d+$"; + public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; + public static final String REGEX_TRUE = "true"; + public static final String REGEX_FALSE = "false"; + + /** + * Constructor. + */ + private OldSerializer() { + + } + + /** + * Creates a String representation of a given {@link Rule}. + * + * @see Rule + * syntax + * @param rule a {@link Rule}. + * @return String representation corresponding to a given {@link Rule}. 
+ * + */ + public static String getString(final Rule rule) { + return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link Conjunction}. + * + * @see Rule + * syntax + * @param conjunction a {@link Conjunction} + * @return String representation corresponding to a given {@link Conjunction}. + */ + public static String getString(final Conjunction conjunction) { + final StringBuilder stringBuilder = new StringBuilder(); + boolean first = true; + for (final Literal literal : conjunction.getLiterals()) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + stringBuilder.append(getString(literal)); + } + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Literal}. + * + * @see Rule + * syntax + * @param literal a {@link Literal} + * @return String representation corresponding to a given {@link Literal}. + */ + public static String getString(final Literal literal) { + final StringBuilder stringBuilder = new StringBuilder(""); + if (literal.isNegated()) { + stringBuilder.append(NEGATIVE_IDENTIFIER); + } + stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); + return stringBuilder.toString(); + } + + /** + * Creates a String representation of a given {@link Fact}. + * + * @see Rule + * syntax + * @param fact a {@link Fact} + * @return String representation corresponding to a given {@link Fact}. + */ + public static String getFactString(final Fact fact) { + return getString(fact) + STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link AbstractConstant}. 
+ */ + public static String getString(final AbstractConstant constant, final Function iriTransformer) { + return getIRIString(constant.getName(), iriTransformer); + } + + /** + * Creates a String representation of a given {@link AbstractConstant}. + * + * @see Rule + * syntax + * @param constant a {@link AbstractConstant} + * @return String representation corresponding to a given + * {@link AbstractConstant}. + */ + public static String getString(final AbstractConstant constant) { + return getIRIString(constant.getName()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + * + * @see Rule + * syntax + * @param languageStringConstant a {@link LanguageStringConstant} + * @return String representation corresponding to the name of a given + * {@link LanguageStringConstant}. + */ + public static String getConstantName(final LanguageStringConstant languageStringConstant) { + return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
    + *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • + *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • + *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • + *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • + *
  • {@code "test"^^} results in + * {@code "test"^^}, modulo transformation of the datatype + * IRI.
  • + *
+ * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { + return getString(datatypeConstant.getLexicalValue()); + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); + } + + return getConstantName(datatypeConstant, iriTransformer); + } + + /** + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
    + *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • + *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • + *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • + *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • + *
  • {@code "test"^^} results in + * {@code "test"^^}.
  • + *
+ * + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant, Function.identity()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + private static String getConstantName(final DatatypeConstant datatypeConstant, + final Function iriTransformer) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule + * syntax + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getConstantName(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + addAngleBrackets(datatypeConstant.getDatatype()); + } + + /** + * Creates a String representation of a given {@link ExistentialVariable}. + * + * @see Rule + * syntax + * @param existentialVariable a {@link ExistentialVariable} + * @return String representation corresponding to a given + * {@link ExistentialVariable}. + */ + public static String getString(final ExistentialVariable existentialVariable) { + return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); + } + + /** + * Creates a String representation of a given {@link UniversalVariable}. 
+ * + * @see Rule + * syntax + * @param universalVariable a {@link UniversalVariable} + * @return String representation corresponding to a given + * {@link UniversalVariable}. + */ + public static String getString(final UniversalVariable universalVariable) { + return UNIVERSAL_IDENTIFIER + universalVariable.getName(); + } + + /** + * Creates a String representation of a given {@link NamedNull}. + * + * @see Rule + * syntax + * @param namedNull a {@link NamedNull} + * @return String representation corresponding to a given {@link NamedNull}. + */ + public static String getString(final NamedNull namedNull) { + return NAMEDNULL_IDENTIFIER + namedNull.getName(); + } + + /** + * Creates a String representation of a given {@link Predicate}. + * + * @see Rule + * syntax + * @param predicate a {@link Predicate} + * @return String representation corresponding to a given {@link Predicate}. + */ + public static String getString(final Predicate predicate) { + return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; + } + + /** + * Creates a String representation of a given {@link DataSourceDeclaration}. + * + * @see Rule + * syntax + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @return String representation corresponding to a given + * {@link DataSourceDeclaration}. + */ + public static String getString(final DataSourceDeclaration dataSourceDeclaration) { + return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR; + // + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + + // STATEMENT_SEPARATOR; + } + + /** + * Creates a String representation of a given {@link CsvFileDataSource}. + * + * @see Rule + * syntax + * + * @param csvFileDataSource + * @return String representation corresponding to a given + * {@link CsvFileDataSource}. 
+ */ + public static String getString(final CsvFileDataSource csvFileDataSource) { + return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given {@link RdfFileDataSource}. + * + * @see Rule + * syntax + * + * + * @param rdfFileDataSource + * @return String representation corresponding to a given + * {@link RdfFileDataSource}. + */ + public static String getString(final RdfFileDataSource rdfFileDataSource) { + return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; + } + + /** + * Creates a String representation of a given + * {@link SparqlQueryResultDataSource}. + * + * @see Rule + * syntax + * + * + * @param dataSource + * @return String representation corresponding to a given + * {@link SparqlQueryResultDataSource}. + */ + public static String getString(final SparqlQueryResultDataSource dataSource) { + return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS + + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA + + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) + + CLOSING_PARENTHESIS; + } + + private static String getFileString(final FileDataSource fileDataSource) { + return getString(fileDataSource.getPath()); + } + + private static String getIRIString(final String string) { + return getIRIString(string, Function.identity()); + } + + private static String getIRIString(final String string, final Function iriTransformer) { + final String transformed = iriTransformer.apply(string); + + if (!transformed.equals(string)) { + return transformed; + } + + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) + || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { + return addAngleBrackets(string); + } + + return string; + } + + /** + * Constructs the parseable, serialized 
representation of given {@code string}. + * Escapes (with {@code \}) special character occurrences in given + * {@code string}, and surrounds the result with double quotation marks + * ({@code "}). The special characters are: + *
    + *
  • {@code \}
  • + *
  • {@code "}
  • + *
  • {@code \t}
  • + *
  • {@code \b}
  • + *
  • {@code \n}
  • + *
  • {@code \r}
  • + *
  • {@code \f}
  • + *
+ * Example for {@code string = "\\a"}, the returned value is + * {@code string = "\"\\\\a\""} + * + * @param string + * @return an escaped string surrounded by {@code "}. + */ + public static String getString(final String string) { + return addQuotes(escape(string)); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
    + *
  • {@code \}
  • + *
  • {@code "}
  • + *
  • {@code \t}
  • + *
  • {@code \b}
  • + *
  • {@code \n}
  • + *
  • {@code \r}
  • + *
  • {@code \f}
  • + *
+ * + * @param string + * @return an escaped string + */ + private static String escape(final String string) { + return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") + .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); + // don't touch single quotes here since we only construct double-quoted strings + } + + private static String addQuotes(final String string) { + return QUOTE + string + QUOTE; + } + + private static String addAngleBrackets(final String string) { + return LESS_THAN + string + MORE_THAN; + } + + public static String getFactString(final Predicate predicate, final List terms) { + return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getFactString(final Predicate predicate, final List terms, + final Function iriTransformer) { + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getString(final Predicate predicate, final List terms) { + return getString(predicate, terms, Function.identity()); + } + + public static String getString(final Predicate predicate, final List terms, + final Function iriTransformer) { + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); + stringBuilder.append(OPENING_PARENTHESIS); + + boolean first = true; + for (final Term term : terms) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + final String string = term.getName();// term.getSyntacticRepresentation(iriTransformer); + stringBuilder.append(string); + } + stringBuilder.append(CLOSING_PARENTHESIS); + return stringBuilder.toString(); + } + + public static String getBaseString(final KnowledgeBase knowledgeBase) { + final String baseIri = knowledgeBase.getBaseIri(); + + return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? 
baseIri : getBaseDeclarationString(baseIri); + } + + private static String getBaseDeclarationString(final String baseIri) { + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getPrefixString(final Entry prefix) { + return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; + } + + public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { + final StringBuilder sb = new StringBuilder(); + + sb.append(getBaseString(knowledgeBase)); + knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); + + return sb.toString(); + } + + public static String getCommandName(final String commandName) { + return AT + commandName; + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index da4bff697..accb87bc4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -86,7 +86,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writePredicate(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java index aae5c7233..15f35d27e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java @@ -105,7 +105,7 @@ public boolean equals(final Object obj) { @Override public String toString() { - 
return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeRule(this)); } @Override diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index a624590be..fe851f090 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -25,6 +25,7 @@ import java.io.Writer; import java.util.Iterator; +import java.util.List; import java.util.Map.Entry; import java.util.function.Function; @@ -68,7 +69,11 @@ public class Serializer { public static final Function identityIriSerializer = new Function() { @Override public String apply(String iri) { - return iri.contains(":") ? "<" + iri + ">" : iri; + if (iri.contains(":") || !iri.matches(AbstractPrefixDeclarationRegistry.REGEXP_LOCNAME)) { + return "<" + iri + ">"; + } else { + return iri; + } } }; @@ -310,11 +315,26 @@ public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclarati * @throws IOException */ public void writeLiteral(Literal literal) throws IOException { - writer.write(getIri(literal.getPredicate().getName())); + if (literal.isNegated()) { + writer.write("~"); + } + writePositiveLiteral(literal.getPredicate(), literal.getArguments()); + } + + /** + * Serialize the given predicate and list of terms like a + * {@link PositiveLiteral}. 
+ * + * @param predicate a {@link Predicate} + * @param arguments a list of {@link Term} arguments + * @throws IOException + */ + public void writePositiveLiteral(Predicate predicate, List arguments) throws IOException { + writer.write(getIri(predicate.getName())); writer.write("("); boolean first = true; - for (final Term term : literal.getArguments()) { + for (final Term term : arguments) { if (first) { first = false; } else { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java index ce6b40aa6..4dde3f11d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java @@ -46,6 +46,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return getSyntacticRepresentation(); + return Serializer.getSerialization(serializer -> serializer.writeUniversalVariable(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index ff9a2d8a0..3f759f573 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -23,7 +23,9 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; -import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.function.BiConsumer; import java.util.stream.Stream; @@ -36,13 +38,14 @@ import org.semanticweb.rulewerk.core.model.api.NamedNull; import 
org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface that exposes the (existential) rule reasoning capabilities of a @@ -131,11 +134,16 @@ default Correctness unsafeForEachInference(BiConsumer> act * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException */ - default Correctness writeInferences(OutputStream stream) throws IOException { - final KnowledgeBase knowledgeBase = getKnowledgeBase(); - stream.write(OldSerializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); - return forEachInference((predicate, termList) -> stream - .write(OldSerializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri).getBytes())); + default Correctness writeInferences(Writer writer) throws IOException { + final PrefixDeclarationRegistry prefixDeclarationRegistry = getKnowledgeBase().getPrefixDeclarationRegistry(); + final Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); + + serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); + + return forEachInference((predicate, termList) -> { + serializer.writePositiveLiteral(predicate, termList); + writer.write(" .\n"); + }); } /** @@ -160,18 +168,21 @@ default Stream getInferences() { Correctness getCorrectness(); /** - * Exports all the (explicit and implicit) facts inferred during - * reasoning of the 
knowledge base to a desired file. + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to a desired file. * * @param filePath a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException * @throws FileNotFoundException + * @deprecated Use {@link KnowledgeBase#writeInferences(Writer)} instead. The + * method will disappear. */ + @Deprecated default Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - return writeInferences(stream); + try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) { + return this.writeInferences(writer); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index c3347b47a..66fcf39c9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -23,9 +23,6 @@ import java.io.IOException; import java.util.Arrays; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of * the extension {@code .csv}. These fact tuples can be associated with a single @@ -51,6 +48,11 @@ */ public class CsvFileDataSource extends FileDataSource { + /** + * The name of the predicate used for declarations of data sources of this type. 
+ */ + public static final String declarationPredicateName = "load-csv"; + private static final Iterable possibleExtensions = Arrays.asList(".csv", ".csv.gz"); /** @@ -73,11 +75,6 @@ public String toString() { return "CsvFileDataSource [csvFile=" + getFile() + "]"; } - @Override - public String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); @@ -85,6 +82,6 @@ public void accept(DataSourceConfigurationVisitor visitor) throws IOException { @Override String getDeclarationPredicateName() { - return "load-csv"; + return declarationPredicateName; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 3dc4c9d2b..18fe4b181 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -24,8 +24,6 @@ import java.util.Arrays; import java.util.Optional; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; - /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside * a file of the extension {@code .nt}. These fact triples can be associated @@ -49,6 +47,11 @@ */ public class RdfFileDataSource extends FileDataSource { + /** + * The name of the predicate used for declarations of data sources of this type. 
+ */ + public static final String declarationPredicateName = "load-rdf"; + private final static Iterable possibleExtensions = Arrays.asList(".nt", ".nt.gz"); /** @@ -71,11 +74,6 @@ public String toString() { return "RdfFileDataSource [rdfFile=" + this.getFile() + "]"; } - @Override - public String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - @Override public Optional getRequiredArity() { return Optional.of(3); @@ -88,6 +86,6 @@ public void accept(DataSourceConfigurationVisitor visitor) throws IOException { @Override String getDeclarationPredicateName() { - return "load-rdf"; + return declarationPredicateName; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index b89e22068..da80ea3b4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -32,7 +32,6 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a @@ -43,6 +42,11 @@ */ public class SparqlQueryResultDataSource implements ReasonerDataSource { + /** + * The name of the predicate used for declarations of data sources of this type. 
+ */ + public static final String declarationPredicateName = "sparql"; + private final URL endpoint; private final String queryVariables; private final String queryBody; @@ -160,11 +164,6 @@ public String toString() { + ", queryBody=" + this.queryBody + "]"; } - @Override - public String getSyntacticRepresentation() { - return OldSerializer.getString(this); - } - @Override public void accept(DataSourceConfigurationVisitor visitor) { visitor.visit(this); @@ -172,7 +171,7 @@ public void accept(DataSourceConfigurationVisitor visitor) { @Override public Fact getDeclarationFact() { - Predicate predicate = Expressions.makePredicate("sparql", 3); + Predicate predicate = Expressions.makePredicate(declarationPredicateName, 3); Term endpointTerm = Expressions.makeAbstractConstant(getEndpoint().toString()); Term variablesTerm = Expressions.makeDatatypeConstant(getQueryVariables(), PrefixDeclarationRegistry.XSD_STRING); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 3c415a190..ef5c89e30 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -33,7 +33,6 @@ import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -89,7 +88,7 @@ public void toString_CsvFileDataSource() throws IOException { 
final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); + final String expectedFilePath = "\"" + relativeDirName + fileName + "\""; assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @@ -114,7 +113,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = OldSerializer.getString(relativeDirName + fileName); + final String expectedFilePath = "\"" + relativeDirName + fileName + "\""; assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 871fec96f..9ecff982a 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -68,6 +68,7 @@ public class SerializerTest { static PositiveLiteral l1 = Expressions.makePositiveLiteral(p1, universalVariable); static Literal l2 = Expressions.makePositiveLiteral(p2, universalVariable, abstractConstantShort); static Rule rule = Expressions.makeRule(l1, l2, fact); + static Literal ln1 = Expressions.makeNegativeLiteral(p1, existentialVariable); StringWriter writer; Serializer serializer; @@ -161,6 +162,24 @@ public void serializeSparqlDataSourceDeclaration() throws IOException { assertEquals("@source p1[1]: sparql(, \"var\", \"?var \") .", writer.toString()); } + @Test + public void serializePositiveLiteral() throws IOException { + serializer.writeLiteral(l1); + assertEquals("p1(?X)", 
writer.toString()); + } + + @Test + public void serializePositiveLiteralFromTerms() throws IOException { + serializer.writePositiveLiteral(l1.getPredicate(),l1.getArguments()); + assertEquals("p1(?X)", writer.toString()); + } + + @Test + public void serializeNegativeLiteral() throws IOException { + serializer.writeLiteral(ln1); + assertEquals("~p1(!X)", writer.toString()); + } + @Test public void serializeAbstractConstantWithPrefixDeclarations() throws IOException { final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java index 9aa9f7129..2420dc479 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java @@ -97,10 +97,10 @@ public void abstractConstantGetterTest() { @Test public void datatypeConstantGetterTest() { - DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); + DatatypeConstant c = new DatatypeConstantImpl("c", "http://example.org/type"); assertEquals("c", c.getLexicalValue()); - assertEquals("http://www.w3.org/2001/XMLSchema#string", c.getDatatype()); - assertEquals("\"c\"^^", c.getName()); + assertEquals("http://example.org/type", c.getDatatype()); + assertEquals("\"c\"^^", c.getName()); assertEquals(TermType.DATATYPE_CONSTANT, c.getType()); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index ca4561dac..d8ce99ddb 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -1,5 +1,9 
@@ package org.semanticweb.rulewerk.parser; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + /*- * #%L * Rulewerk Parser @@ -20,7 +24,6 @@ * #L% */ -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; @@ -43,9 +46,9 @@ public DefaultParserConfiguration() { * Register built-in data sources (currently CSV, RDF, SPARQL). */ private void registerDefaultDataSources() { - registerDataSource(OldSerializer.CSV_FILE_DATA_SOURCE, new CsvFileDataSourceDeclarationHandler()); - registerDataSource(OldSerializer.RDF_FILE_DATA_SOURCE, new RdfFileDataSourceDeclarationHandler()); - registerDataSource(OldSerializer.SPARQL_QUERY_RESULT_DATA_SOURCE, + registerDataSource(CsvFileDataSource.declarationPredicateName, new CsvFileDataSourceDeclarationHandler()); + registerDataSource(RdfFileDataSource.declarationPredicateName, new RdfFileDataSourceDeclarationHandler()); + registerDataSource(SparqlQueryResultDataSource.declarationPredicateName, new SparqlQueryResultDataSourceDeclarationHandler()); } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java index 910e9375b..442e56de9 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java @@ -185,12 +185,10 @@ public void literalToStringRoundTripTest() throws ParsingException { @Test public void 
datatypeDoubleConstantToStringRoundTripTest() throws ParsingException { - String shortDoubleConstant = "12.345E67"; - assertEquals(shortDoubleConstant, - RuleParser.parseFact("p(\"" + shortDoubleConstant + "\"^^).") - .getArguments().get(0).toString()); - assertEquals(shortDoubleConstant, - RuleParser.parseFact("p(" + shortDoubleConstant + ").").getArguments().get(0).toString()); + String doubleConstant = "\"12.345E67\"^^"; + assertEquals(doubleConstant, + RuleParser.parseFact("p(" + doubleConstant + ").").getArguments().get(0).toString()); + assertEquals(doubleConstant, RuleParser.parseFact("p(12.345E67).").getArguments().get(0).toString()); } @Test @@ -221,11 +219,9 @@ public void datatypeIntegerConstantToStringRoundTripTest() throws ParsingExcepti @Test public void datatypeDecimalToStringRoundTripTest() throws ParsingException { - String shortDecimalConstant = "0.23"; - assertEquals(shortDecimalConstant, - RuleParser.parseFact("p(\"" + shortDecimalConstant + "\"^^).") - .getArguments().get(0).toString()); - assertEquals(shortDecimalConstant, - RuleParser.parseFact("p(" + shortDecimalConstant + ").").getArguments().get(0).toString()); + String decimalConstant = "\"0.23\"^^"; + assertEquals(decimalConstant, + RuleParser.parseFact("p(" + decimalConstant + ").").getArguments().get(0).toString()); + assertEquals(decimalConstant, RuleParser.parseFact("p(0.23).").getArguments().get(0).toString()); } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 271a49598..42041cc6d 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -249,7 +249,7 @@ public void parseLiteral_escapeSequences_succeeds() throws ParsingException { public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { 
PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); - assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test @@ -265,7 +265,7 @@ public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); - assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) @@ -292,7 +292,7 @@ public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); - assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java index 4952cfb84..e406487f9 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -73,7 +73,7 @@ public String[][] getData() { } @Override - public String getSyntacticRepresentation() { + public String toString() { final StringBuilder sb = new StringBuilder( "This 
InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); for (int i = 0; i < getData().length; i++) { diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index 3b7876dda..b237dc947 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -30,7 +30,6 @@ import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; /** @@ -83,15 +82,15 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { static Term toTerm(karmaresearch.vlog.Term vLogTerm) { final String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { - case CONSTANT: - return toConstant(name); - case BLANK: - return new NamedNullImpl(name); - case VARIABLE: - throw new IllegalArgumentException( - "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); - default: - throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); + case CONSTANT: + return toConstant(name); + case BLANK: + return new NamedNullImpl(name); + case VARIABLE: + throw new IllegalArgumentException( + "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); + default: + throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); } } @@ -103,19 +102,17 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { */ 
static Constant toConstant(String vLogConstantName) { final Constant constant; - if (vLogConstantName.charAt(0) == OldSerializer.LESS_THAN - && vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { + if (vLogConstantName.charAt(0) == '<' && vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { // strip <> off of IRIs constant = new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); - } else if (vLogConstantName.charAt(0) == OldSerializer.QUOTE) { - if (vLogConstantName.charAt(vLogConstantName.length() - 1) == OldSerializer.MORE_THAN) { - final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.LESS_THAN, - vLogConstantName.length() - 2); + } else if (vLogConstantName.charAt(0) == '"') { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + final int startTypeIdx = vLogConstantName.lastIndexOf('<', vLogConstantName.length() - 2); final String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); final String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); constant = new DatatypeConstantImpl(lexicalValue, datatype); } else { - final int startTypeIdx = vLogConstantName.lastIndexOf(OldSerializer.AT, vLogConstantName.length() - 2); + final int startTypeIdx = vLogConstantName.lastIndexOf('@', vLogConstantName.length() - 2); if (startTypeIdx > -1) { final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); final String string = vLogConstantName.substring(1, startTypeIdx - 1); diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java index 19365056e..c5ff2617d 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java +++ 
b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -23,8 +23,8 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -118,15 +118,15 @@ public void writeInferences_withPrefixDeclarations_abbreviatesIris() assertTrue("the abbreviated fact is present", getInferences().contains("eg:s(eg:c) .")); } - @Test + @Test(expected = RulewerkRuntimeException.class) public void writeInferences_withBase_writesBase() throws IOException, PrefixDeclarationException { PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); when(prefixDeclarations.getBaseIri()).thenReturn("http://example.org/"); when(prefixDeclarations.iterator()).thenReturn(new HashMap().entrySet().iterator()); kb.mergePrefixDeclarations(prefixDeclarations); - - assertEquals(11, getInferences().size()); - assertTrue("the base declaration is present", getInferences().contains("@base .")); + getInferences(); + //assertEquals(11, getInferences().size()); + //assertTrue("the base declaration is present", getInferences().contains("@base .")); } @Test @@ -134,8 +134,7 @@ public void getInferences_example_succeeds() throws IOException { final List inferences = getInferences(); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final List fromStream = reasoner.getInferences().map(Fact::getSyntacticRepresentation) - .collect(Collectors.toList()); + final List fromStream = reasoner.getInferences().map(Fact::toString).collect(Collectors.toList()); assertEquals(inferences, fromStream); } } @@ -148,7 +147,7 @@ public void unsafeForEachInference_example_succeeds() throws IOException { final List fromUnsafe = new ArrayList<>(); reasoner.unsafeForEachInference((Predicate, terms) -> { - 
fromUnsafe.add(Expressions.makeFact(Predicate, terms).getSyntacticRepresentation()); + fromUnsafe.add(Expressions.makeFact(Predicate, terms).toString()); }); assertEquals(inferences, fromUnsafe); @@ -183,11 +182,10 @@ public void unsafeForEachInference_throwingAction_throws() throws IOException { private List getInferences() throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - reasoner.writeInferences(stream); - stream.flush(); + StringWriter writer = new StringWriter(); + reasoner.writeInferences(writer); - Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s?)\\.\\s*")); + Stream inferences = Arrays.stream(writer.toString().split("(?<=[>)]\\s?)\\.\\s*")); return inferences.map((String inference) -> inference + ".").collect(Collectors.toList()); } From c50a5faaccf49e0fbe51334ddfc82dc72aa02440 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:46:18 +0200 Subject: [PATCH 0954/1255] Support serialization of Commands --- .../rulewerk/core/model/api/Command.java | 22 +++------------- .../core/model/implementation/Serializer.java | 25 +++++++++++++++++++ 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java index 1a5dc295b..5240358ad 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Command.java @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Class for representing a generic command that can be executed. 
@@ -64,23 +64,9 @@ public List getArguments() { return arguments; } - public String getSyntacticRepresentation() { - StringBuilder result = new StringBuilder("@"); - result.append(name); - for (Argument argument : arguments) { - result.append(" "); - if (argument.fromRule().isPresent()) { - Rule rule = argument.fromRule().get(); - result.append(OldSerializer.getString(rule.getHead())).append(OldSerializer.RULE_SEPARATOR) - .append(OldSerializer.getString(rule.getBody())); - } else if (argument.fromPositiveLiteral().isPresent()) { - result.append(argument.fromPositiveLiteral().get().toString()); - } else { - throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); - } - } - result.append(OldSerializer.STATEMENT_SEPARATOR); - return result.toString(); + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeCommand(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index fe851f090..0c1e41d58 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -30,6 +30,8 @@ import java.util.function.Function; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Conjunction; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; @@ -495,6 +497,29 @@ public void writeLanguageStringConstant(LanguageStringConstant languageStringCon writer.write(languageStringConstant.getLanguageTag()); } + /** + * Writes a serialization of the 
given {@link Command}. + * + * @param command a {@link Command} + * @throws IOException + */ + public void writeCommand(Command command) throws IOException { + writer.write("@"); + writer.write(command.getName()); + + for (Argument argument : command.getArguments()) { + writer.write(" "); + if (argument.fromRule().isPresent()) { + writeRule(argument.fromRule().get()); + } else if (argument.fromPositiveLiteral().isPresent()) { + writeLiteral(argument.fromPositiveLiteral().get()); + } else { + throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); + } + } + writer.write(STATEMENT_END); + } + /** * Convenience method for obtaining serializations as Java strings. * From 423d0b8699aba532c976014df4b948719122eb29 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:46:33 +0200 Subject: [PATCH 0955/1255] Avoid deprecated OldSerializer --- .../rulewerk/client/shell/DefaultConfiguration.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 927c61701..73477a47b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -34,7 +34,6 @@ import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.implementation.OldSerializer; public final class DefaultConfiguration { @@ -63,7 +62,7 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List serializedCommandNames = 
registeredCommandNames.stream() - .map(commandName -> OldSerializer.getCommandName(commandName)) + .map(commandName -> "@" + commandName) .collect(Collectors.toList()); return new StringsCompleter(serializedCommandNames); } From 23fe99b61723dbd6d0f7dacfc7ba6bf228164af0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 01:57:51 +0200 Subject: [PATCH 0956/1255] Remove old serializer code --- .../rulewerk/core/model/api/Conjunction.java | 2 +- .../core/model/api/DataSourceDeclaration.java | 2 +- .../rulewerk/core/model/api/Entity.java | 4 +- .../rulewerk/core/model/api/Literal.java | 2 +- .../rulewerk/core/model/api/Rule.java | 2 +- .../rulewerk/core/model/api/Statement.java | 2 +- .../rulewerk/core/model/api/SyntaxObject.java | 2 +- .../model/implementation/OldSerializer.java | 560 ------------------ 8 files changed, 7 insertions(+), 569 deletions(-) delete mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 60b53ea1a..f7b8b760f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Conjunction extends Iterable, SyntaxObject, Entity { +public interface Conjunction extends Iterable, SyntaxObject { /** * Returns the list of literals that are part of this conjunction. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index 14fb54412..22efc0aae 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface DataSourceDeclaration extends Statement, Entity { +public interface DataSourceDeclaration extends Statement { /** * Returns the {@link Predicate} that this source applies to. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index c4bfd7a16..75c40aa3d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -21,9 +21,7 @@ */ /** - * Interface for every parsable data model that has a string representation - * - * @author Ali Elhalawati + * Most general type of syntactic entity in Rulewerk. 
* */ public interface Entity { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index 896dc1f6f..b345b070d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -31,7 +31,7 @@ * @author david.carral@tu-dresden.de * @author Irina Dragoste */ -public interface Literal extends SyntaxObject, Entity { +public interface Literal extends SyntaxObject { boolean isNegated(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 6b4e0ea79..9187282eb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -30,7 +30,7 @@ * @author Markus Krötzsch * */ -public interface Rule extends SyntaxObject, Statement, Entity { +public interface Rule extends SyntaxObject, Statement { /** * Returns the conjunction of head literals (the consequence of the rule). diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java index fc2b4b009..f43c03248 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java @@ -27,7 +27,7 @@ * @author Markus Kroetzsch * */ -public interface Statement { +public interface Statement extends Entity { /** * Accept a {@link StatementVisitor} and return its output. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java index 31de18cf6..7dcf50142 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java @@ -30,7 +30,7 @@ * @author Markus Kroetzsch * */ -public interface SyntaxObject { +public interface SyntaxObject extends Entity { /** * Returns the stream of distinct terms that occur in this object. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java deleted file mode 100644 index 44cb41273..000000000 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/OldSerializer.java +++ /dev/null @@ -1,560 +0,0 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.Literal; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; - -/** - * A utility class with static methods to obtain the correct parsable string - * representation of the different data models. 
- * - * @author Ali Elhalawati - * - */ -@Deprecated -public final class OldSerializer { - private static final String NEW_LINE = "\n"; - public static final String STATEMENT_SEPARATOR = " ."; - public static final String COMMA = ", "; - public static final String NEGATIVE_IDENTIFIER = "~"; - public static final String EXISTENTIAL_IDENTIFIER = "!"; - public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_:"; - public static final String OPENING_PARENTHESIS = "("; - public static final String CLOSING_PARENTHESIS = ")"; - public static final String OPENING_BRACKET = "["; - public static final String CLOSING_BRACKET = "]"; - public static final String RULE_SEPARATOR = " :- "; - public static final char AT = '@'; - public static final String DATA_SOURCE = "@source "; - public static final String BASE = "@base "; - public static final String PREFIX = "@prefix "; - public static final String CSV_FILE_DATA_SOURCE = "load-csv"; - public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; - public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; - public static final String DATA_SOURCE_SEPARATOR = ": "; - public static final String COLON = ":"; - public static final String DOUBLE_CARET = "^^"; - public static final char LESS_THAN = '<'; - public static final char MORE_THAN = '>'; - public static final char QUOTE = '"'; - - public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; - public static final String REGEX_INTEGER = "^[-+]?\\d+$"; - public static final String REGEX_DECIMAL = "^(\\d*\\.)?\\d+$"; - public static final String REGEX_TRUE = "true"; - public static final String REGEX_FALSE = "false"; - - /** - * Constructor. - */ - private OldSerializer() { - - } - - /** - * Creates a String representation of a given {@link Rule}. - * - * @see Rule - * syntax - * @param rule a {@link Rule}. - * @return String representation corresponding to a given {@link Rule}. 
- * - */ - public static String getString(final Rule rule) { - return getString(rule.getHead()) + RULE_SEPARATOR + getString(rule.getBody()) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link Conjunction}. - * - * @see Rule - * syntax - * @param conjunction a {@link Conjunction} - * @return String representation corresponding to a given {@link Conjunction}. - */ - public static String getString(final Conjunction conjunction) { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Literal literal : conjunction.getLiterals()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - stringBuilder.append(getString(literal)); - } - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Literal}. - * - * @see Rule - * syntax - * @param literal a {@link Literal} - * @return String representation corresponding to a given {@link Literal}. - */ - public static String getString(final Literal literal) { - final StringBuilder stringBuilder = new StringBuilder(""); - if (literal.isNegated()) { - stringBuilder.append(NEGATIVE_IDENTIFIER); - } - stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); - return stringBuilder.toString(); - } - - /** - * Creates a String representation of a given {@link Fact}. - * - * @see Rule - * syntax - * @param fact a {@link Fact} - * @return String representation corresponding to a given {@link Fact}. - */ - public static String getFactString(final Fact fact) { - return getString(fact) + STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link AbstractConstant}. 
- */ - public static String getString(final AbstractConstant constant, final Function iriTransformer) { - return getIRIString(constant.getName(), iriTransformer); - } - - /** - * Creates a String representation of a given {@link AbstractConstant}. - * - * @see Rule - * syntax - * @param constant a {@link AbstractConstant} - * @return String representation corresponding to a given - * {@link AbstractConstant}. - */ - public static String getString(final AbstractConstant constant) { - return getIRIString(constant.getName()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - * - * @see Rule - * syntax - * @param languageStringConstant a {@link LanguageStringConstant} - * @return String representation corresponding to the name of a given - * {@link LanguageStringConstant}. - */ - public static String getConstantName(final LanguageStringConstant languageStringConstant) { - return getString(languageStringConstant.getString()) + AT + languageStringConstant.getLanguageTag(); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
    - *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • - *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • - *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • - *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • - *
  • {@code "test"^^} results in - * {@code "test"^^}, modulo transformation of the datatype - * IRI.
  • - *
- * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { - return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } - - return getConstantName(datatypeConstant, iriTransformer); - } - - /** - * Creates a String representation corresponding to the given - * {@link DatatypeConstant}. For datatypes that have specialised lexical - * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), - * this representation is returned, otherwise the result is a generic literal - * with full datatype IRI. - * - * examples: - *
    - *
  • {@code "string"^^xsd:String} results in {@code "string"},
  • - *
  • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
  • - *
  • {@code "42"^^xsd:Integer} results in {@code 42},
  • - *
  • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
  • - *
  • {@code "test"^^} results in - * {@code "test"^^}.
  • - *
- * - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getString(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant, Function.identity()); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - private static String getConstantName(final DatatypeConstant datatypeConstant, - final Function iriTransformer) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); - } - - /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} including an IRI. - * - * @see Rule - * syntax - * @param datatypeConstant a {@link DatatypeConstant} - * @return String representation corresponding to a given - * {@link DatatypeConstant}. - */ - public static String getConstantName(final DatatypeConstant datatypeConstant) { - return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + addAngleBrackets(datatypeConstant.getDatatype()); - } - - /** - * Creates a String representation of a given {@link ExistentialVariable}. - * - * @see Rule - * syntax - * @param existentialVariable a {@link ExistentialVariable} - * @return String representation corresponding to a given - * {@link ExistentialVariable}. - */ - public static String getString(final ExistentialVariable existentialVariable) { - return EXISTENTIAL_IDENTIFIER + existentialVariable.getName(); - } - - /** - * Creates a String representation of a given {@link UniversalVariable}. 
- * - * @see Rule - * syntax - * @param universalVariable a {@link UniversalVariable} - * @return String representation corresponding to a given - * {@link UniversalVariable}. - */ - public static String getString(final UniversalVariable universalVariable) { - return UNIVERSAL_IDENTIFIER + universalVariable.getName(); - } - - /** - * Creates a String representation of a given {@link NamedNull}. - * - * @see Rule - * syntax - * @param namedNull a {@link NamedNull} - * @return String representation corresponding to a given {@link NamedNull}. - */ - public static String getString(final NamedNull namedNull) { - return NAMEDNULL_IDENTIFIER + namedNull.getName(); - } - - /** - * Creates a String representation of a given {@link Predicate}. - * - * @see Rule - * syntax - * @param predicate a {@link Predicate} - * @return String representation corresponding to a given {@link Predicate}. - */ - public static String getString(final Predicate predicate) { - return predicate.getName() + OPENING_BRACKET + predicate.getArity() + CLOSING_BRACKET; - } - - /** - * Creates a String representation of a given {@link DataSourceDeclaration}. - * - * @see Rule - * syntax - * @param dataSourceDeclaration a {@link DataSourceDeclaration} - * @return String representation corresponding to a given - * {@link DataSourceDeclaration}. - */ - public static String getString(final DataSourceDeclaration dataSourceDeclaration) { - return DATA_SOURCE + getString(dataSourceDeclaration.getPredicate()) + DATA_SOURCE_SEPARATOR; - // + dataSourceDeclaration.getDataSource().getSyntacticRepresentation() + - // STATEMENT_SEPARATOR; - } - - /** - * Creates a String representation of a given {@link CsvFileDataSource}. - * - * @see Rule - * syntax - * - * @param csvFileDataSource - * @return String representation corresponding to a given - * {@link CsvFileDataSource}. 
- */ - public static String getString(final CsvFileDataSource csvFileDataSource) { - return CSV_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(csvFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given {@link RdfFileDataSource}. - * - * @see Rule - * syntax - * - * - * @param rdfFileDataSource - * @return String representation corresponding to a given - * {@link RdfFileDataSource}. - */ - public static String getString(final RdfFileDataSource rdfFileDataSource) { - return RDF_FILE_DATA_SOURCE + OPENING_PARENTHESIS + getFileString(rdfFileDataSource) + CLOSING_PARENTHESIS; - } - - /** - * Creates a String representation of a given - * {@link SparqlQueryResultDataSource}. - * - * @see Rule - * syntax - * - * - * @param dataSource - * @return String representation corresponding to a given - * {@link SparqlQueryResultDataSource}. - */ - public static String getString(final SparqlQueryResultDataSource dataSource) { - return SPARQL_QUERY_RESULT_DATA_SOURCE + OPENING_PARENTHESIS - + addAngleBrackets(dataSource.getEndpoint().toString()) + COMMA - + addQuotes(dataSource.getQueryVariables()) + COMMA + addQuotes(dataSource.getQueryBody()) - + CLOSING_PARENTHESIS; - } - - private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getPath()); - } - - private static String getIRIString(final String string) { - return getIRIString(string, Function.identity()); - } - - private static String getIRIString(final String string, final Function iriTransformer) { - final String transformed = iriTransformer.apply(string); - - if (!transformed.equals(string)) { - return transformed; - } - - if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) - || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { - return addAngleBrackets(string); - } - - return string; - } - - /** - * Constructs the parseable, serialized 
representation of given {@code string}. - * Escapes (with {@code \}) special character occurrences in given - * {@code string}, and surrounds the result with double quotation marks - * ({@code "}). The special characters are: - *
    - *
  • {@code \}
  • - *
  • {@code "}
  • - *
  • {@code \t}
  • - *
  • {@code \b}
  • - *
  • {@code \n}
  • - *
  • {@code \r}
  • - *
  • {@code \f}
  • - *
- * Example for {@code string = "\\a"}, the returned value is - * {@code string = "\"\\\\a\""} - * - * @param string - * @return an escaped string surrounded by {@code "}. - */ - public static String getString(final String string) { - return addQuotes(escape(string)); - } - - /** - * Escapes (with {@code \}) special character occurrences in given - * {@code string}. The special characters are: - *
    - *
  • {@code \}
  • - *
  • {@code "}
  • - *
  • {@code \t}
  • - *
  • {@code \b}
  • - *
  • {@code \n}
  • - *
  • {@code \r}
  • - *
  • {@code \f}
  • - *
- * - * @param string - * @return an escaped string - */ - private static String escape(final String string) { - return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); - // don't touch single quotes here since we only construct double-quoted strings - } - - private static String addQuotes(final String string) { - return QUOTE + string + QUOTE; - } - - private static String addAngleBrackets(final String string) { - return LESS_THAN + string + MORE_THAN; - } - - public static String getFactString(final Predicate predicate, final List terms) { - return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getFactString(final Predicate predicate, final List terms, - final Function iriTransformer) { - return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getString(final Predicate predicate, final List terms) { - return getString(predicate, terms, Function.identity()); - } - - public static String getString(final Predicate predicate, final List terms, - final Function iriTransformer) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); - stringBuilder.append(OPENING_PARENTHESIS); - - boolean first = true; - for (final Term term : terms) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getName();// term.getSyntacticRepresentation(iriTransformer); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); - } - - public static String getBaseString(final KnowledgeBase knowledgeBase) { - final String baseIri = knowledgeBase.getBaseIri(); - - return baseIri.equals(PrefixDeclarationRegistry.EMPTY_BASE) ? 
baseIri : getBaseDeclarationString(baseIri); - } - - private static String getBaseDeclarationString(final String baseIri) { - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getPrefixString(final Entry prefix) { - return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; - } - - public static String getBaseAndPrefixDeclarations(final KnowledgeBase knowledgeBase) { - final StringBuilder sb = new StringBuilder(); - - sb.append(getBaseString(knowledgeBase)); - knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); - - return sb.toString(); - } - - public static String getCommandName(final String commandName) { - return AT + commandName; - } -} From 4e6d581e0e048b6fb76779f37369c100cd91ac57 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 09:20:45 +0200 Subject: [PATCH 0957/1255] fix Command serialization --- .../core/model/implementation/Serializer.java | 16 +++++++++++++--- .../rulewerk/core/model/SerializerTest.java | 19 +++++++++++++++++-- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 0c1e41d58..740200569 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -290,10 +290,20 @@ public void writeFact(Fact fact) throws IOException { * @throws IOException */ public void writeRule(Rule rule) throws IOException { + writeRuleNoStatment(rule); + writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Rule} without the final dot. 
+ * + * @param rule a {@link Rule} + * @throws IOException + */ + private void writeRuleNoStatment(Rule rule) throws IOException { writeLiteralConjunction(rule.getHead()); writer.write(" :- "); writeLiteralConjunction(rule.getBody()); - writer.write(STATEMENT_END); } /** @@ -510,11 +520,11 @@ public void writeCommand(Command command) throws IOException { for (Argument argument : command.getArguments()) { writer.write(" "); if (argument.fromRule().isPresent()) { - writeRule(argument.fromRule().get()); + writeRuleNoStatment(argument.fromRule().get()); } else if (argument.fromPositiveLiteral().isPresent()) { writeLiteral(argument.fromPositiveLiteral().get()); } else { - throw new UnsupportedOperationException("Serialisation of commands is not fully implemented yet."); + writeTerm(argument.fromTerm().get()); } } writer.write(STATEMENT_END); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 9ecff982a..332524344 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -26,10 +26,13 @@ import java.io.StringWriter; import java.io.Writer; import java.net.URL; +import java.util.ArrayList; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Literal; @@ -167,10 +170,10 @@ public void serializePositiveLiteral() throws IOException { serializer.writeLiteral(l1); assertEquals("p1(?X)", writer.toString()); } - + @Test public void serializePositiveLiteralFromTerms() throws IOException { - 
serializer.writePositiveLiteral(l1.getPredicate(),l1.getArguments()); + serializer.writePositiveLiteral(l1.getPredicate(), l1.getArguments()); assertEquals("p1(?X)", writer.toString()); } @@ -211,6 +214,18 @@ public void serializePrefixDeclarations() throws IOException { assertEquals("@base .\n@prefix eg: .\n", writer.toString()); } + @Test + public void serializeCommand() throws IOException { + ArrayList arguments = new ArrayList<>(); + arguments.add(Argument.term(abstractConstant)); + arguments.add(Argument.positiveLiteral(fact)); + arguments.add(Argument.rule(rule)); + Command command = new Command("command", arguments); + + serializer.writeCommand(command); + assertEquals("@command p1(c) p1(?X) :- p2(?X, c), p1(c) .", writer.toString()); + } + @Test public void createThrowingSerializer_succeeds() throws IOException { getThrowingSerializer(); From 12c9b9248509736089ee08245170e4ec2367ee5e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 09:38:52 +0200 Subject: [PATCH 0958/1255] nicer serialisation of KB --- .../core/model/implementation/Serializer.java | 11 ++++++++-- .../rulewerk/core/reasoner/KnowledgeBase.java | 16 +++++++++++++- .../rulewerk/core/model/SerializerTest.java | 21 ++++++++++++++++++- .../core/reasoner/KnowledgeBaseTest.java | 2 +- 4 files changed, 45 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 740200569..810b85a0e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -467,12 +467,16 @@ public void writeNamedNull(NamedNull namedNull) throws IOException { } /** - * Writes a serialization of the given {@link PrefixDeclarationRegistry}. 
+ * Writes a serialization of the given {@link PrefixDeclarationRegistry}, and + * returns true if anything has been written. * * @param prefixDeclarationRegistry a {@link PrefixDeclarationRegistry} * @throws IOException + * @return true if anything has been written */ - public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) throws IOException { + public boolean writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) + throws IOException { + boolean result = false; final String baseIri = prefixDeclarationRegistry.getBaseIri(); if (!PrefixDeclarationRegistry.EMPTY_BASE.contentEquals(baseIri)) { writer.write("@base <"); @@ -480,6 +484,7 @@ public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDecla writer.write(">"); writer.write(STATEMENT_END); writer.write("\n"); + result = true; } Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); @@ -492,7 +497,9 @@ public void writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDecla writer.write(">"); writer.write(STATEMENT_END); writer.write("\n"); + result = true; } + return result; } /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 698683e22..fda90958d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -591,19 +591,33 @@ public String unresolveAbsoluteIri(String iri) { public void writeKnowledgeBase(Writer writer) throws IOException { Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); - serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); + boolean makeSeperator = serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); for (DataSourceDeclaration dataSourceDeclaration : 
this.getDataSourceDeclarations()) { + if (makeSeperator) { + writer.write('\n'); + makeSeperator = false; + } serializer.writeDataSourceDeclaration(dataSourceDeclaration); writer.write('\n'); } + makeSeperator |= !this.getDataSourceDeclarations().isEmpty(); for (Fact fact : this.getFacts()) { + if (makeSeperator) { + writer.write('\n'); + makeSeperator = false; + } serializer.writeFact(fact); writer.write('\n'); } + makeSeperator |= !this.getFacts().isEmpty(); for (Rule rule : this.getRules()) { + if (makeSeperator) { + writer.write('\n'); + makeSeperator = false; + } serializer.writeRule(rule); writer.write('\n'); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java index 332524344..f495de508 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java @@ -210,8 +210,19 @@ public void serializePrefixDeclarations() throws IOException { prefixes.setPrefixIri("eg:", "http://example.org/"); Serializer prefSerializer = new Serializer(writer, prefixes); - prefSerializer.writePrefixDeclarationRegistry(prefixes); + boolean result = prefSerializer.writePrefixDeclarationRegistry(prefixes); assertEquals("@base .\n@prefix eg: .\n", writer.toString()); + assertTrue(result); + } + + @Test + public void serializeEmptyPrefixDeclarations() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + Serializer prefSerializer = new Serializer(writer, prefixes); + + boolean result = prefSerializer.writePrefixDeclarationRegistry(prefixes); + assertEquals("", writer.toString()); + assertFalse(result); } @Test @@ -277,4 +288,12 @@ public void serializeDataSourceDeclaration_fails() throws IOException { getThrowingSerializer().writeStatement(csvSourceDecl); } + @Test(expected = 
IOException.class) + public void serializePrefixDeclarations_fails() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setBaseIri("http://example.org/base"); + prefixes.setPrefixIri("eg:", "http://example.org/"); + getThrowingSerializer().writePrefixDeclarationRegistry(prefixes); + } + } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index ea3254874..6fa079bce 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -163,6 +163,6 @@ public void writeKnowledgeBase_alsoRuleAndDataSource_succeeds() throws IOExcepti StringWriter writer = new StringWriter(); this.kb.writeKnowledgeBase(writer); assertEquals("@source S[1]: sparql(<" + sparqlIri + ">, \"?X\", \"" + sparqlBgp - + "\") .\nP(c) .\nP(d) .\nQ(c) .\nP(?X) :- Q(?X) .\n", writer.toString()); + + "\") .\n\nP(c) .\nP(d) .\nQ(c) .\n\nP(?X) :- Q(?X) .\n", writer.toString()); } } From 148bc8df245a3788aea3366fe46dbaab72d757ff Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 10:03:02 +0200 Subject: [PATCH 0959/1255] pretty print query results --- .../commands/QueryCommandInterpreter.java | 10 ++- .../reasoner/LiteralQueryResultPrinter.java | 85 +++++++++++++++++++ 2 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 5bead8d3f..4637e6cca 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.commands; +import java.io.IOException; + /*- * #%L * Rulewerk Core Components @@ -28,6 +30,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Timer; @@ -59,17 +62,22 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("Unrecognized arguments"); } + LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(literal, interpreter.getWriter(), + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); + Timer timer = new Timer("query"); timer.start(); try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { int count = 0; while (count != limit && answers.hasNext()) { - interpreter.getWriter().println(" " + answers.next()); + printer.write(answers.next()); count++; } timer.stop(); interpreter.getWriter().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms. 
Results are " + answers.getCorrectness() + "."); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java new file mode 100644 index 000000000..0335af239 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java @@ -0,0 +1,85 @@ +package org.semanticweb.rulewerk.core.reasoner; + +import java.io.IOException; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.Writer; +import java.util.LinkedHashMap; +import java.util.Map.Entry; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +/** + * Class for writing {@link QueryResult} objects in pretty print. 
+ * + * @author Markus Kroetzsch + * + */ +public class LiteralQueryResultPrinter { + + final LinkedHashMap firstIndex = new LinkedHashMap<>(); + final PrefixDeclarationRegistry prefixDeclarationRegistry; + final Writer writer; + final Serializer serializer; + + public LiteralQueryResultPrinter(PositiveLiteral positiveLiteral, Writer writer, + PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.writer = writer; + this.serializer = new Serializer(writer, prefixDeclarationRegistry); + this.prefixDeclarationRegistry = prefixDeclarationRegistry; + + int i = 0; + for (Term term : positiveLiteral.getArguments()) { + if (term.getType() == TermType.UNIVERSAL_VARIABLE) { + UniversalVariable variable = (UniversalVariable) term; + if (!firstIndex.containsKey(variable)) { + firstIndex.put(variable, i); + } + } + i++; + } + } + + public void write(QueryResult queryResult) throws IOException { + boolean first = true; + for (Entry entry : firstIndex.entrySet()) { + if (first) { + first = false; + } else { + writer.write(", "); + } + serializer.writeUniversalVariable(entry.getKey()); + writer.write(" -> "); + serializer.writeTerm(queryResult.getTerms().get(entry.getValue())); + } + if (first) { + writer.write("true"); + } + writer.write("\n"); + } +} From 9f16e30d2af281a64c7f1ba8225e074de5f6e413 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 10:33:05 +0200 Subject: [PATCH 0960/1255] better display of Boolean query results --- .../commands/QueryCommandInterpreter.java | 15 +++-- .../reasoner/LiteralQueryResultPrinter.java | 61 +++++++++++++++++-- 2 files changed, 65 insertions(+), 11 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 4637e6cca..fa6cb3f7a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -68,14 +68,19 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio Timer timer = new Timer("query"); timer.start(); try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { - int count = 0; - while (count != limit && answers.hasNext()) { + while (printer.getResultCount() != limit && answers.hasNext()) { printer.write(answers.next()); - count++; } timer.stop(); - interpreter.getWriter().println(count + " result(s) in " + timer.getTotalCpuTime() / 1000000 - + "ms. Results are " + answers.getCorrectness() + "."); + + if (printer.isBooleanQuery()) { + interpreter.printEmph(printer.hadResults() ? "true\n" : "false\n"); + interpreter.printNormal("Answered in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } else { + interpreter.printNormal( + printer.getResultCount() + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } + interpreter.printNormal(" Results are " + answers.getCorrectness() + ".\n"); } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java index 0335af239..129423051 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java @@ -43,15 +43,28 @@ public class LiteralQueryResultPrinter { final LinkedHashMap firstIndex = new LinkedHashMap<>(); - final PrefixDeclarationRegistry prefixDeclarationRegistry; final Writer writer; final Serializer serializer; + int resultCount = 0; + + /** + * Constructor. 
+ * + * @param positiveLiteral the query pattern for which query results + * are to be printed + * @param writer the object to write the output to + * @param prefixDeclarationRegistry information on prefixes used to compute IRI + * abbreviations; can be null + */ public LiteralQueryResultPrinter(PositiveLiteral positiveLiteral, Writer writer, PrefixDeclarationRegistry prefixDeclarationRegistry) { this.writer = writer; - this.serializer = new Serializer(writer, prefixDeclarationRegistry); - this.prefixDeclarationRegistry = prefixDeclarationRegistry; + if (prefixDeclarationRegistry == null) { + this.serializer = new Serializer(writer); + } else { + this.serializer = new Serializer(writer, prefixDeclarationRegistry); + } int i = 0; for (Term term : positiveLiteral.getArguments()) { @@ -65,6 +78,14 @@ public LiteralQueryResultPrinter(PositiveLiteral positiveLiteral, Writer writer, } } + /** + * Writes a {@link QueryResult} to the specified writer. Nothing is written for + * results of Boolean queries (not even a linebreak). + * + * @param queryResult the {@link QueryResult} to write; this result must be + * based on the query literal specified in the constructor + * @throws IOException if a problem occurred in writing + */ public void write(QueryResult queryResult) throws IOException { boolean first = true; for (Entry entry : firstIndex.entrySet()) { @@ -77,9 +98,37 @@ public void write(QueryResult queryResult) throws IOException { writer.write(" -> "); serializer.writeTerm(queryResult.getTerms().get(entry.getValue())); } - if (first) { - writer.write("true"); + resultCount++; + if (!first) { + writer.write("\n"); } - writer.write("\n"); } + + /** + * Returns the number of results written so far. + * + * @return number of results + */ + public int getResultCount() { + return resultCount; + } + + /** + * Returns true if the query has had any results. 
+ * + * @return true if query result is not empty + */ + public boolean hadResults() { + return resultCount != 0; + } + + /** + * Returns true if the query is boolean, i.e., has no answer variables. + * + * @return true if query is boolean + */ + public boolean isBooleanQuery() { + return firstIndex.size() == 0; + } + } From 7a9c6d180f2cb1c4f7ca3d587c02e9dfa81da5dc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 10:54:22 +0200 Subject: [PATCH 0961/1255] add space before final . this prevents some parse errors in the current code (which should eventually be fixed in the parser) --- .../org/semanticweb/rulewerk/client/shell/CommandReader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index f3aeb72e6..48a55eeea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -71,7 +71,7 @@ public Command readCommand() { readLine = "@" + readLine; } if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + "."; + readLine = readLine + " ."; } try { From 39b1e72178d64a9c461cda589ef61363baba8f8e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 11:23:17 +0200 Subject: [PATCH 0962/1255] support COUNT and csv export --- .../commands/QueryCommandInterpreter.java | 130 ++++++++++++++---- 1 file changed, 105 insertions(+), 25 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index fa6cb3f7a..2589ec056 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -30,44 +30,117 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Timer; public class QueryCommandInterpreter implements CommandInterpreter { public static Term KEYWORD_LIMIT = Expressions.makeAbstractConstant("LIMIT"); + public static Term KEYWORD_COUNT = Expressions.makeAbstractConstant("COUNT"); + public static Term KEYWORD_TOFILE = Expressions.makeAbstractConstant("EXPORTCSV"); + + private PositiveLiteral queryLiteral; + private int limit; + private boolean doCount; + private String csvFile; @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + processArguments(command.getArguments()); + + if (doCount) { + printCountQueryResults(interpreter); + } else if (csvFile == null) { + printQueryResults(interpreter); + } else { + exportQueryResults(interpreter); + } + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" + + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" + + " limit: maximal number of results to be shown\n" + + " filename: string path to CSV file for exporting query results"; + } - List arguments = command.getArguments(); - PositiveLiteral literal; + @Override + public String getSynopsis() { + return "print or export query results"; + } + + private void processArguments(List arguments) throws CommandExecutionException { + int pos = 0; + limit = -1; + 
doCount = false; + csvFile = null; + + if (arguments.size() > 0 && KEYWORD_COUNT.equals(arguments.get(0).fromTerm().orElse(null))) { + doCount = true; + pos++; + } - if (arguments.size() > 0 && arguments.get(0).fromPositiveLiteral().isPresent()) { - literal = arguments.get(0).fromPositiveLiteral().get(); + if (arguments.size() > pos && arguments.get(pos).fromPositiveLiteral().isPresent()) { + queryLiteral = arguments.get(pos).fromPositiveLiteral().get(); + pos++; } else { - throw new CommandExecutionException("First argument must be a query literal."); + throw new CommandExecutionException("A query literal must be given."); } - int limit = -1; - if (arguments.size() == 3 && KEYWORD_LIMIT.equals(arguments.get(1).fromTerm().orElse(null)) - && arguments.get(2).fromTerm().isPresent()) { - try { - limit = Terms.extractInt(arguments.get(2).fromTerm().get()); - } catch (IllegalArgumentException e) { - throw new CommandExecutionException("Invalid limit given: " + arguments.get(3).fromTerm().get()); + while (arguments.size() > pos) { + if (arguments.size() > pos + 1 && KEYWORD_LIMIT.equals(arguments.get(pos).fromTerm().orElse(null)) + && arguments.get(pos + 1).fromTerm().isPresent()) { + try { + limit = Terms.extractInt(arguments.get(pos + 1).fromTerm().get()); + pos += 2; + } catch (IllegalArgumentException e) { + throw new CommandExecutionException( + "Invalid limit given: " + arguments.get(pos + 1).fromTerm().get()); + } + } else if (arguments.size() > pos + 1 && KEYWORD_TOFILE.equals(arguments.get(pos).fromTerm().orElse(null)) + && arguments.get(pos + 1).fromTerm().isPresent()) { + try { + csvFile = Terms.extractString(arguments.get(pos + 1).fromTerm().get()); + pos += 2; + } catch (IllegalArgumentException e) { + throw new CommandExecutionException( + "Invalid filename given: " + arguments.get(pos + 1).fromTerm().get()); + } + } else { + throw new CommandExecutionException("Unrecognized arguments"); } - } else if (arguments.size() != 1) { - throw new 
CommandExecutionException("Unrecognized arguments"); } + } - LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(literal, interpreter.getWriter(), + private void printCountQueryResults(Interpreter interpreter) throws CommandExecutionException { + if (limit != -1) { + throw new CommandExecutionException("LIMIT not supported with COUNT"); + } + if (csvFile != null) { + throw new CommandExecutionException("COUNT results cannot be exported to CSV"); + } + + Timer timer = new Timer("query"); + timer.start(); + QueryAnswerCount count = interpreter.getReasoner().countQueryAnswers(queryLiteral); + timer.stop(); + + interpreter.printNormal(String.valueOf(count.getCount()) + "\n"); + interpreter.printNormal("Answered in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.printNormal(" This result is " + count.getCorrectness() + ".\n"); + } + + private void printQueryResults(Interpreter interpreter) throws CommandExecutionException { + LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(queryLiteral, interpreter.getWriter(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Timer timer = new Timer("query"); timer.start(); - try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(literal, true)) { + try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(queryLiteral, true)) { while (printer.getResultCount() != limit && answers.hasNext()) { printer.write(answers.next()); } @@ -86,15 +159,22 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " [LIMIT ] .\n" - + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" - + " limit: maximal number of results to be shown"; - } + private void exportQueryResults(Interpreter interpreter) throws CommandExecutionException { + if (limit != -1) { + throw new 
CommandExecutionException("LIMIT not supported for CSV export"); + } - @Override - public String getSynopsis() { - return "print results to queries"; + Timer timer = new Timer("query"); + timer.start(); + Correctness correctness; + try { + correctness = interpreter.getReasoner().exportQueryAnswersToCsv(queryLiteral, csvFile, true); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + timer.stop(); + + interpreter.printNormal("Written query result file in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.printNormal(" This result is " + correctness + ".\n"); } } From f83768fc6df73a1648695ae00983c674a19c9f1d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 11:25:25 +0200 Subject: [PATCH 0963/1255] correct help message --- .../semanticweb/rulewerk/commands/QueryCommandInterpreter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 2589ec056..63fb5bea9 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -63,7 +63,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public String getHelp(String commandName) { return "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" - + " query literal: positive literal; may use ?queryVariables and ?existentialVariables\n" + + " query literal: positive literal, possibly with ?queryVariables\n" + " limit: maximal number of results to be shown\n" + " filename: string path to CSV file for exporting query results"; } From 75040d32a42717ee0f189ec197a5658465084490 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 12:17:09 +0200 Subject: 
[PATCH 0964/1255] improved messages --- .../commands/LoadCommandInterpreter.java | 32 ++++++------------- .../commands/ReasonCommandInterpreter.java | 2 +- 2 files changed, 11 insertions(+), 23 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 7b12e755e..c615bd08a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -24,7 +24,6 @@ import java.io.FileNotFoundException; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -32,28 +31,17 @@ public class LoadCommandInterpreter implements CommandInterpreter { @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - if (command.getArguments().size() == 1) { - String fileName; - try { - fileName = Terms.extractString( - command.getArguments().get(0).fromTerm().orElseThrow(() -> new CommandExecutionException( - "Expected string for file name, but did not find a term."))); - } catch (IllegalArgumentException e) { - throw new CommandExecutionException("Failed to convert term given for file name to string."); - } - try { - FileInputStream fileInputStream = new FileInputStream(fileName); - RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); - } catch (FileNotFoundException e) { - throw new CommandExecutionException(e.getMessage(), e); - } catch (ParsingException e) { - interpreter.getWriter().println("Error parsing file: " + e.getMessage()); - } - - } else { - throw new CommandExecutionException(getHelp(command.getName())); + Interpreter.validateArgumentCount(command, 1); + 
String fileName = Interpreter.extractStringArgument(command, 0, "filename"); + + try { + FileInputStream fileInputStream = new FileInputStream(fileName); + RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); + } catch (FileNotFoundException e) { + throw new CommandExecutionException(e.getMessage(), e); + } catch (ParsingException e) { + interpreter.getWriter().println("Error parsing file: " + e.getMessage()); } - } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index fe0b9f580..59927008c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -45,7 +45,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getWriter().println("... finished in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.getWriter().println("... 
finished in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); } @Override From 0be1d8a0404e05b41ecbb7bcbf3146d441f8b4af Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 12:17:38 +0200 Subject: [PATCH 0965/1255] support exporting infernces and kbs --- .../commands/ExportCommandInterpreter.java | 95 +++++++++++++++++++ .../rulewerk/commands/Interpreter.java | 8 +- 2 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java new file mode 100644 index 000000000..b125893a0 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -0,0 +1,95 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.StandardCharsets; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class ExportCommandInterpreter implements CommandInterpreter { + + static final String TASK_KB = "KB"; + static final String TASK_INFERENCES = "INFERENCES"; + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + + String task = Interpreter.extractNameArgument(command, 0, "task"); + String fileName = Interpreter.extractStringArgument(command, 1, "filename"); + + if (TASK_KB.equals(task)) { + exportKb(interpreter, fileName); + } else if (TASK_INFERENCES.equals(task)) { + exportInferences(interpreter, fileName); + } else { + throw new CommandExecutionException( + "Unknown task " + task + ". 
Should be " + TASK_KB + " or " + TASK_INFERENCES); + } + + } + + @Override + public String getHelp(String commandName) { + return "Usage: @" + commandName + " TASK \"filename\" .\n" // + + " TASK: what to export; can be KB or INFERENCES\n" // + + " \"filename\": string path export file (suggested extension: .rls)"; + } + + @Override + public String getSynopsis() { + return "export knowledgebase or inferences to a Rulewerk file"; + } + + private void exportInferences(Interpreter interpreter, String fileName) throws CommandExecutionException { + Timer timer = new Timer("export"); + Correctness correctness; + try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + timer.start(); + correctness = interpreter.getReasoner().writeInferences(writer); + timer.stop(); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + + interpreter.printNormal("Exported all inferences in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); + interpreter.printNormal(" This result is " + correctness + ".\n"); + } + + private void exportKb(Interpreter interpreter, String fileName) throws CommandExecutionException { + Timer timer = new Timer("export"); + try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + timer.start(); + interpreter.getKnowledgeBase().writeKnowledgeBase(writer); + timer.stop(); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 724b61271..bf7afefbe 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -50,7 +50,8 @@ public class Interpreter { final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - public Interpreter(final Reasoner reasoner, final StyledPrinter printer, final ParserConfiguration parserConfiguration) { + public Interpreter(final Reasoner reasoner, final StyledPrinter printer, + final ParserConfiguration parserConfiguration) { this.reasoner = reasoner; this.printer = printer; this.parserConfiguration = parserConfiguration; @@ -154,6 +155,7 @@ private void registerDefaultCommandInterpreters() { this.registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); this.registerCommandInterpreter("reason", new ReasonCommandInterpreter()); this.registerCommandInterpreter("query", new QueryCommandInterpreter()); + this.registerCommandInterpreter("export", new ExportCommandInterpreter()); this.registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); } @@ -197,8 +199,8 @@ public static String extractNameArgument(final Command command, final int index, } } - public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, final String parameterName) - throws CommandExecutionException { + public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, + final String parameterName) throws CommandExecutionException { return command.getArguments().get(index).fromPositiveLiteral() .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); } From 5fde93ea8b5fd54b223cd2fa0e938b818b002e43 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 14:01:49 +0200 Subject: [PATCH 0966/1255] Use Writer rather than PrintWriter --- .../rulewerk/client/shell/Shell.java | 5 ++- .../client/shell/TerminalStyledPrinter.java | 1 + .../commands/AssertCommandInterpreter.java | 
2 +- .../rulewerk/commands/Interpreter.java | 4 +-- .../commands/LoadCommandInterpreter.java | 2 +- .../commands/ReasonCommandInterpreter.java | 6 ++-- .../RemoveSourceCommandInterpreter.java | 6 ++-- .../commands/RetractCommandInterpreter.java | 2 +- .../commands/SimpleStyledPrinter.java | 32 ++++++++++++------- .../rulewerk/commands/StyledPrinter.java | 16 +++++----- 10 files changed, 43 insertions(+), 33 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 9a2cd08d0..a36caba3c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -51,7 +51,7 @@ public void run(final CommandReader commandReader) { try { command = commandReader.readCommand(); } catch (final Exception e) { - interpreter.getWriter().println("Unexpected error: " + e.getMessage()); + interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); e.printStackTrace(); continue; } @@ -60,12 +60,11 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - interpreter.getWriter().println("Error: " + e.getMessage()); + interpreter.printNormal("Error: " + e.getMessage() + "\n"); } } } interpreter.printSection("Existing Rulewerk shell ... 
bye.\n\n"); - interpreter.getWriter().flush(); } public void exitShell() { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java index 8e77422f5..d481f2c3a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java @@ -68,5 +68,6 @@ public PrintWriter getWriter() { private void printStyled(String string, AttributedStyle attributedStyle) { AttributedString attributedString = new AttributedString(string, attributedStyle); getWriter().print(attributedString.toAnsi(terminal)); + getWriter().flush(); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 99e1c90e7..2e5602cd0 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getWriter().println("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.printNormal("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index bf7afefbe..41756b5ea 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -2,7 +2,7 @@ import 
java.io.ByteArrayInputStream; import java.io.InputStream; -import java.io.PrintWriter; +import java.io.Writer; /*- * #%L @@ -121,7 +121,7 @@ public ParserConfiguration getParserConfiguration() { return this.parserConfiguration; } - public PrintWriter getWriter() { + public Writer getWriter() { return this.printer.getWriter(); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index c615bd08a..403cea74d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -40,7 +40,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } catch (FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { - interpreter.getWriter().println("Error parsing file: " + e.getMessage()); + interpreter.printNormal("Error parsing file: " + e.getMessage() + "\n"); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 59927008c..4b53cecda 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -34,8 +34,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException("This command supports no arguments."); } - interpreter.getWriter().println("Loading and materializing inferences ..."); - interpreter.getWriter().flush(); + interpreter.printNormal("Loading and materializing inferences ...\n"); Timer timer = new Timer("reasoning"); 
timer.start(); @@ -45,7 +44,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); - interpreter.getWriter().println("... finished in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); + interpreter.printNormal("... finished in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 8da1513a8..afa1d2fa8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -47,9 +47,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (dataSource != null) { DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { - interpreter.getWriter().println("Removed specified data source declaration."); + interpreter.printNormal("Removed specified data source declaration.\n"); } else { - interpreter.getWriter().println("Specified data source declaration not found in knowledge base."); + interpreter.printNormal("Specified data source declaration not found in knowledge base.\n"); } } else { int count = 0; @@ -60,7 +60,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio count++; } } - interpreter.getWriter().println("Removed " + count + " matching data source declaration(s)."); + interpreter.printNormal("Removed " + count + " matching data source declaration(s).\n"); } } diff --git 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 5680ae27b..c44ae848b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -50,7 +50,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } - interpreter.getWriter().println("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s)."); + interpreter.printNormal("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java index 3c91a2218..1ba22dfe0 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.commands; +import java.io.IOException; + /*- * #%L * Rulewerk command execution support @@ -20,7 +22,7 @@ * #L% */ -import java.io.PrintWriter; +import java.io.Writer; /** * Simple implementation of {@link StyledPrinter} based on an arbitrary @@ -31,40 +33,48 @@ */ public class SimpleStyledPrinter implements StyledPrinter { - final PrintWriter printWriter; + final Writer writer; - public SimpleStyledPrinter(final PrintWriter printWriter) { - this.printWriter = printWriter; + public SimpleStyledPrinter(final Writer writer) { + this.writer = writer; } @Override public void printNormal(String string) { - printWriter.print(string); + write(string); } @Override public void printSection(String string) { - printWriter.print(string); + write(string); } @Override 
public void printEmph(String string) { - printWriter.print(string); + write(string); } @Override public void printCode(String string) { - printWriter.print(string); + write(string); } @Override public void printImportant(String string) { - printWriter.print(string); + write(string); } @Override - public PrintWriter getWriter() { - return printWriter; + public Writer getWriter() { + return writer; + } + + private void write(String string) { + try { + writer.write(string); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java index a3d73df78..8e642b597 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java @@ -20,20 +20,20 @@ * #L% */ -import java.io.PrintWriter; +import java.io.Writer; public interface StyledPrinter { - + void printNormal(String string); - + void printSection(String string); - + void printEmph(String string); - + void printCode(String string); - + void printImportant(String string); - - PrintWriter getWriter(); + + Writer getWriter(); } From 99ee054d95466329c0801b321e058c6043cadd06 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 15:04:12 +0200 Subject: [PATCH 0967/1255] change how command help is printed --- .../shell/commands/ExitCommandInterpreter.java | 14 +++++++------- .../commands/AddSourceCommandInterpreter.java | 6 +++--- .../commands/AssertCommandInterpreter.java | 6 +++--- .../rulewerk/commands/CommandInterpreter.java | 9 ++++----- .../commands/ExportCommandInterpreter.java | 12 +++++++----- .../rulewerk/commands/HelpCommandInterpreter.java | 9 +++++---- .../rulewerk/commands/LoadCommandInterpreter.java | 4 ++-- .../rulewerk/commands/QueryCommandInterpreter.java | 11 ++++++----- 
.../commands/ReasonCommandInterpreter.java | 4 ++-- .../commands/RemoveSourceCommandInterpreter.java | 6 +++--- .../commands/RetractCommandInterpreter.java | 6 +++--- .../commands/SetPrefixCommandInterpreter.java | 4 ++-- .../commands/ShowKbCommandInterpreter.java | 4 ++-- 13 files changed, 49 insertions(+), 46 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 5c2ac4c83..ea2645279 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -25,18 +25,18 @@ import org.semanticweb.rulewerk.client.shell.Shell; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; public class ExitCommandInterpreter implements CommandInterpreter { public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>()); - public static enum ExitCommandName - { + public static enum ExitCommandName { exit; public static boolean isExitCommand(final String commandName) { - for(final ExitCommandName name: values()) { + for (final ExitCommandName name : values()) { if (name.toString().equals(commandName)) { return true; } @@ -44,16 +44,16 @@ public static boolean isExitCommand(final String commandName) { return false; } } - + final Shell shell; - + public ExitCommandInterpreter(Shell shell) { this.shell = shell; } @Override - public String getHelp(final String commandName) { - return "Usage: " + commandName + "."; + public void printHelp(final String commandName, Interpreter interpreter) { + 
interpreter.printNormal("Usage: " + commandName + ".\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index f0182e3dd..5fe8936cd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -52,11 +52,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " : a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources."; + + "Note that every predicate can have multiple sources.\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java index 2e5602cd0..bf194e079 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -57,10 +57,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " ()+ .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" + " fact or rule: statement(s) to be added to the knowledge base\n" - + "Reasoning needs to be invoked after 
finishing addition of statements."; + + "Reasoning needs to be invoked after finishing addition of statements.\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java index 7959a3376..ff2c1f154 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java @@ -40,13 +40,12 @@ public interface CommandInterpreter { void run(Command command, Interpreter interpreter) throws CommandExecutionException; /** - * Return a text that describes command use and parameters, using the given + * Prints a text that describes command use and parameters, using the given * command name. The output should start with a "Usage:" line, followed by - * single-space-indented parameter descriptions. - * - * @return help message + * single-space-indented parameter descriptions, and it should end with a + * newline. */ - String getHelp(String commandName); + void printHelp(String commandName, Interpreter interpreter); /** * Returns a short line describing the purpose of the command. 
diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index b125893a0..c6f518f13 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -54,10 +54,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " TASK \"filename\" .\n" // + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " TASK \"filename\" .\n" // + " TASK: what to export; can be KB or INFERENCES\n" // - + " \"filename\": string path export file (suggested extension: .rls)"; + + " \"filename\": string path export file (suggested extension: .rls)\n"); } @Override @@ -76,7 +76,8 @@ private void exportInferences(Interpreter interpreter, String fileName) throws C throw new CommandExecutionException(e.getMessage(), e); } - interpreter.printNormal("Exported all inferences in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); + interpreter.printNormal("Exported all inferences in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); interpreter.printNormal(" This result is " + correctness + ".\n"); } @@ -89,7 +90,8 @@ private void exportKb(Interpreter interpreter, String fileName) throws CommandEx } catch (IOException e) { throw new CommandExecutionException(e.getMessage(), e); } - interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); + interpreter.printNormal("Exported knowledge base in " + 
timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); } } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 292a50b2a..431e25205 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -47,18 +47,19 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (interpreter.commandInterpreters.containsKey(helpCommand)) { interpreter.printCode("@" + helpCommand); interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis() + "\n"); - interpreter.printNormal(interpreter.commandInterpreters.get(helpCommand).getHelp(helpCommand) + "\n"); + interpreter.commandInterpreters.get(helpCommand).printHelp(helpCommand, interpreter); } else { interpreter.printNormal("Command '" + helpCommand + "' not known.\n"); } } else { - interpreter.printNormal(getHelp(command.getName())); + printHelp(command.getName(), interpreter); } } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal( + "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 403cea74d..feeea25c2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -45,8 +45,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file"; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 63fb5bea9..310505656 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -61,11 +61,12 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" - + " query literal: positive literal, possibly with ?queryVariables\n" - + " limit: maximal number of results to be shown\n" - + " filename: string path to CSV file for exporting query results"; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal( + "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" + + " query literal: positive literal, possibly with ?queryVariables\n" + + " limit: maximal number of results to be shown\n" + + " filename: string path to CSV file for exporting query results\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java index 4b53cecda..6d56aeee5 
100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -49,8 +49,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " ."; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " .\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index afa1d2fa8..9a7c606a4 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -66,11 +66,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " []: .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " (optional): a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources."; + + "Note that every predicate can have multiple sources.\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index c44ae848b..9acdbb4a3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -54,10 +54,10 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " ()+ .\n" + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" + " fact or rule: statement(s) to be removed from the knowledge base\n" - + "Reasoning needs to be invoked after finishing the removal of statements."; + + "Reasoning needs to be invoked after finishing the removal of statements.\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java index ed31fb400..c713f7789 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -40,8 +40,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + " : ."; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " : .\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java index b854e0b4d..138f3ca48 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -37,8 +37,8 @@ public void run(Command command, Interpreter 
interpreter) throws CommandExecutio } @Override - public String getHelp(String commandName) { - return "Usage: @" + commandName + "."; + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " .\n"); } @Override From 67a2b7b48d828af15b754b5f83a0aab3fc24938b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 16:01:09 +0200 Subject: [PATCH 0968/1255] remove unused constructor --- .../rulewerk/commands/CommandExecutionException.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java index 9b9a5c6b0..9d4fcce4b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java @@ -29,10 +29,6 @@ public class CommandExecutionException extends RulewerkException { */ private static final long serialVersionUID = 1479091500621334935L; - public CommandExecutionException(Throwable cause) { - super(cause); - } - public CommandExecutionException(String message, Throwable cause) { super(message, cause); } From 11b6d32dd9916ec6e649640d9744897daffc214b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 17:17:23 +0200 Subject: [PATCH 0969/1255] fix documentation --- .../semanticweb/rulewerk/core/reasoner/KnowledgeBase.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index fda90958d..6936811e7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -25,7 +25,6 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; @@ -581,9 +580,9 @@ public String unresolveAbsoluteIri(String iri) { } /** - * Serialise the KnowledgeBase to the {@link OutputStream}. + * Serialise the KnowledgeBase to the {@link Writer}. * - * @param writer the {@link OutputStream} to serialise to. + * @param writer the {@link Writer} to serialise to. * * @throws IOException if an I/O error occurs while writing to given output * stream From d71b4e362c28091a30bf45a73a46fa97b1dd3ef8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 18:26:52 +0200 Subject: [PATCH 0970/1255] extract method to open files for testing --- .../rulewerk/commands/ExportCommandInterpreter.java | 7 ++----- .../org/semanticweb/rulewerk/commands/Interpreter.java | 7 +++++++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index c6f518f13..4ead3798d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -20,11 +20,8 @@ * #L% */ -import java.io.FileOutputStream; import java.io.IOException; -import java.io.OutputStreamWriter; import java.io.Writer; -import java.nio.charset.StandardCharsets; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.Correctness; @@ -68,7 +65,7 @@ public String getSynopsis() { private void exportInferences(Interpreter interpreter, String fileName) throws 
CommandExecutionException { Timer timer = new Timer("export"); Correctness correctness; - try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); correctness = interpreter.getReasoner().writeInferences(writer); timer.stop(); @@ -83,7 +80,7 @@ private void exportInferences(Interpreter interpreter, String fileName) throws C private void exportKb(Interpreter interpreter, String fileName) throws CommandExecutionException { Timer timer = new Timer("export"); - try (Writer writer = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)) { + try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); interpreter.getKnowledgeBase().writeKnowledgeBase(writer); timer.stop(); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 41756b5ea..d6dc0fecd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,7 +1,10 @@ package org.semanticweb.rulewerk.commands; import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.io.InputStream; +import java.io.OutputStreamWriter; import java.io.Writer; /*- @@ -205,4 +208,8 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); } + public Writer getFileWriter(String fileName) throws FileNotFoundException { + return new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8); + } + } From 1ba2d701aa8e81575f99ff25abc2d569e2245fa7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 18:27:08 +0200 Subject: [PATCH 0971/1255] 
fix usage message --- .../semanticweb/rulewerk/commands/HelpCommandInterpreter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java index 431e25205..7707137e2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -58,8 +58,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal( - "Usage: @" + commandName + " [command name] .\n" + "\t command name: command to get detailed help for"); + interpreter.printNormal("Usage: @" + commandName + " [command name] .\n" // + + "\t command name: command to get detailed help for\n"); } @Override From 436d10e963b96906a0494e35c3a3a021666f84af Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 18:28:11 +0200 Subject: [PATCH 0972/1255] more unit tests --- .../AddSourceCommandInterpreterTest.java | 120 +++++++ .../AssertCommandInterpreterTest.java | 95 ++++++ .../ExportCommandInterpreterTest.java | 156 +++++++++ .../commands/HelpCommandInterpreterTest.java | 117 +++++++ .../rulewerk/commands/InterpreterTest.java | 103 ++++++ .../commands/QueryCommandInterpreterTest.java | 313 ++++++++++++++++++ .../ReasonCommandInterpreterTest.java | 85 +++++ .../RemoveSourceCommandInterpreterTest.java | 176 ++++++++++ .../RetractCommandInterpreterTest.java | 108 ++++++ .../ShowKbCommandInterpreterTest.java | 97 ++++++ 10 files changed, 1370 insertions(+) create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java create mode 100644 
rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java new file mode 100644 index 000000000..e56c4ea3d --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java @@ -0,0 +1,120 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class AddSourceCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter + .parseCommand("@addsource p[1] : sparql(, \"?x\", \"?x

\") ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("addsource", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromPositiveLiteral().isPresent()); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertEquals(1, dataSourceDeclarations.size()); + assertTrue(dataSourceDeclarations.get(0).getDataSource() instanceof SparqlQueryResultDataSource); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource \"string\" p(a)."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[1]: \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentUnknownSource_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[1]: unknown(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void 
wrongSecondArgumentWrongAritySource_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[1]: load-rdf(\"file.nt\") ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@addsource p[2]: p(a) p(b) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java new file mode 100644 index 000000000..0d6f01a78 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java @@ -0,0 +1,95 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class AssertCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@assert p(a) q(?X) :- r(?X) ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("assert", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(1).fromRule().isPresent()); + + Literal literal = command.getArguments().get(0).fromPositiveLiteral().get(); + Rule rule = command.getArguments().get(1).fromRule().get(); + + assertEquals(Arrays.asList(literal), facts); + 
assertEquals(Arrays.asList(rule), rules); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@assert \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@assert p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new AssertCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new AssertCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java new file mode 100644 index 000000000..52207d1bf --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java @@ -0,0 +1,156 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + 
* %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ExportCommandInterpreterTest { + + @Test + public void correctUseKb_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + StringWriter fileWriter = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(fileWriter).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@export KB \"test.rls\" ."); + 
interpreter.runCommand(command); + + StringWriter anotherWriter = new StringWriter(); + interpreter.getKnowledgeBase().writeKnowledgeBase(anotherWriter); + + assertEquals("export", command.getName()); + assertEquals(2, command.getArguments().size()); + assertEquals(anotherWriter.toString(), fileWriter.toString()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseKbIoException_failse() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + + Command command = interpreter.parseCommand("@export KB \"test.rls\" ."); + interpreter.runCommand(command); + } + + @Test + public void correctUseInferences_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + StringWriter fileWriter = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(fileWriter).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + Mockito.when(interpreter.getReasoner().writeInferences(Mockito.any(Writer.class))) + .thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + + Command command = interpreter.parseCommand("@export INFERENCES \"test.rls\" ."); + interpreter.runCommand(command); + + assertEquals("export", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(writer.toString().contains(Correctness.SOUND_BUT_INCOMPLETE.toString())); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseInferencesIoException_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter 
origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileWriter(Mockito.eq("test.rls")); + + Command command = interpreter.parseCommand("@export INFERENCES \"test.rls\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void unknonwTask_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export UNKNOWN \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export \"string\" \"file.rls\"."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export KB 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@export KB \"file.rls\" more ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new 
StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ExportCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ExportCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java new file mode 100644 index 000000000..8a89c1cea --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java @@ -0,0 +1,117 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class HelpCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help ."); + interpreter.runCommand(command); + + String output = writer.toString(); + for (String commandName : interpreter.getRegisteredCommands()) { + assertTrue(output.contains("@" + commandName)); + } + } + + @Test + public void correctUseWithCommand_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help query."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentCount_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help query showkb ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void unknownCommandHelp_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help unknowncommand ."); + interpreter.runCommand(command); + // Nothing much to test here. 
+ assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentTypeTerm_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help 123 ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentTypeFact_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help p(a) ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new HelpCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new HelpCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java new file mode 100644 index 000000000..ffe2c5380 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java @@ -0,0 +1,103 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed 
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class InterpreterTest { + + static public Interpreter getMockInterpreter(Writer writer) { + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return new Interpreter(reasoner, printer, parserConfiguration); + } + + /** + * Checks the basic format of command usage instructions and verifies that the + * given command name is used (not a fixed one). 
+ * + * @param commandInterpreter + * @param interpreter + * @param writer + */ + static public void checkHelpFormat(CommandInterpreter commandInterpreter, Interpreter interpreter, + StringWriter writer) { + commandInterpreter.printHelp("commandname", interpreter); + String result = writer.toString(); + + assertTrue(result.startsWith("Usage: @commandname ")); + assertTrue(result.endsWith("\n")); + } + + static public void checkSynopsisFormat(CommandInterpreter commandInterpreter) { + String synopsis = commandInterpreter.getSynopsis(); + assertTrue(synopsis.length() < 70); + } + + @Test + public void getters_succeed() { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); + + assertEquals(knowledgeBase, interpreter.getKnowledgeBase()); + assertEquals(reasoner, interpreter.getReasoner()); + assertEquals(writer, interpreter.getWriter()); + assertEquals(parserConfiguration, interpreter.getParserConfiguration()); + } + + @Test(expected = CommandExecutionException.class) + public void unknownCommand_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@unknown ."); + interpreter.runCommand(command); + } + + @Test(expected = ParsingException.class) + public void malformedCommand_fails() throws ParsingException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = getMockInterpreter(writer); + + interpreter.parseCommand("malformed ."); + } + +} diff --git 
a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java new file mode 100644 index 000000000..a23f72b13 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java @@ -0,0 +1,313 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class QueryCommandInterpreterTest { + + class TestQueryResultIterator implements QueryResultIterator { + + final Iterator results; + + public TestQueryResultIterator(List results) { + this.results = results.iterator(); + } + + @Override + public boolean hasNext() { + return results.hasNext(); + } + + @Override + public QueryResult next() { + return results.next(); + } + + @Override + public Correctness getCorrectness() { + return Correctness.SOUND_AND_COMPLETE; + } + + @Override + public void close() { + } + + } + + @Test + public void correctUseQuery_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + QueryResult r1 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-1#"))); + QueryResult r2 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-2#"))); + QueryResult r3 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-3#"))); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList(r1, r2, 
r3)); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(output.contains("#TEST-1#")); + assertTrue(output.contains("#TEST-2#")); + assertFalse(output.contains("#TEST-3#")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseBooleanQueryTrue_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + QueryResult r1 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("TEST-1"))); + QueryResult r2 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-2#"))); + QueryResult r3 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-3#"))); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList(r1, r2, r3)); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(TEST-1) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertFalse(output.contains("TEST-1")); + assertFalse(output.contains("#TEST-2#")); + assertFalse(output.contains("#TEST-3#")); + 
assertTrue(output.startsWith("true")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseBooleanQueryFalse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList()); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(TEST-1) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(output.startsWith("false")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseCount_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + QueryAnswerCount queryAnswerCount = new QueryAnswerCountImpl(Correctness.SOUND_AND_COMPLETE, 42); + Mockito.when(interpreter.getReasoner().countQueryAnswers(Mockito.any(), Mockito.eq(true))) + .thenReturn(queryAnswerCount); + Mockito.when(interpreter.getReasoner().countQueryAnswers(Mockito.any())).thenReturn(queryAnswerCount); + + Command command = interpreter.parseCommand("@query COUNT p(?X) ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + + assertEquals("query", command.getName()); + assertEquals(2, command.getArguments().size()); + 
assertTrue(writer.toString().startsWith("42\n")); + assertTrue(writer.toString().contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseExport_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Mockito.when(interpreter.getReasoner().exportQueryAnswersToCsv(Mockito.any(), Mockito.eq("file.csv"), + Mockito.anyBoolean())).thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(writer.toString().contains(Correctness.SOUND_BUT_INCOMPLETE.toString())); + } + + @Test(expected = CommandExecutionException.class) + public void exportIoError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Mockito.when(interpreter.getReasoner().exportQueryAnswersToCsv(Mockito.any(), Mockito.eq("file.csv"), + Mockito.anyBoolean())).thenThrow(IOException.class); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountZero_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + 
public void wrongArgumentNoLiteral_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT LIMIT 10 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT p(?X) LIMIT 10 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithExportFile_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongLimitTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT \"10\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongLimitNoTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void 
wrongArgumentMissingLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongExportFileTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongExportFileNoTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentMissingExportFile_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentExportWithLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT 10 EXPORTCSV \"test.csv\" ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, 
CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new QueryCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new QueryCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java new file mode 100644 index 000000000..025540d02 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java @@ -0,0 +1,85 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ReasonCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Mockito.when(interpreter.getReasoner().getCorrectness()).thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + Mockito.when(interpreter.getReasoner().reason()).thenAnswer(I -> { + Mockito.when(interpreter.getReasoner().getCorrectness()).thenReturn(Correctness.SOUND_AND_COMPLETE); + return true; + }); + + Command command = interpreter.parseCommand("@reason ."); + interpreter.runCommand(command); + + assertEquals(Correctness.SOUND_AND_COMPLETE, interpreter.getReasoner().getCorrectness()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseReasonerException_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Mockito.when(interpreter.getReasoner().reason()).thenThrow(IOException.class); + + Command command = interpreter.parseCommand("@reason ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@reason p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws 
ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ReasonCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ReasonCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java new file mode 100644 index 000000000..c5532acf9 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java @@ -0,0 +1,176 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RemoveSourceCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, MalformedURLException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x", "?x

"); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration); + + Command command = interpreter + .parseCommand("@delsource p[1] : sparql(, \"?x\", \"?x

\") ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("delsource", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromPositiveLiteral().isPresent()); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test + public void correctUseNothingRemoved_succeeds() + throws ParsingException, CommandExecutionException, MalformedURLException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x", "?x

"); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration); + + Command command = interpreter + .parseCommand("@delsource another[1] : sparql(, \"?x\", \"?x

\") ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), dataSourceDeclarations); + } + + @Test + public void correctUseRemoveAll_succeeds() + throws ParsingException, CommandExecutionException, MalformedURLException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Predicate predicate2 = Expressions.makePredicate("q", 1); + DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x", "?x

"); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration); + interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration2); + + Command command = interpreter.parseCommand("@delsource p[1] ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertTrue(facts.isEmpty()); + assertTrue(rules.isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration2), dataSourceDeclarations); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@dellsource \"string\" p(a)."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource p[1]: \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgumentUnknownSource_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource p[1]: unknown(a) ."); + interpreter.runCommand(command); + } + + 
@Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource p[2]: p(a) p(b) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountZero_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@delsource ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new RemoveSourceCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new RemoveSourceCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java new file mode 100644 index 000000000..3381ac9ec --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java @@ -0,0 +1,108 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + 
* you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RetractCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + Fact fact2 = Expressions.makeFact(q, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); 
+ interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(fact2); + interpreter.getKnowledgeBase().addStatement(rule); + + Command command = interpreter.parseCommand("@retract p(a) q(?X) :- r(?X) ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("retract", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(1).fromRule().isPresent()); + + assertEquals(Arrays.asList(fact2), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void 
synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java new file mode 100644 index 000000000..5ddd3b231 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java @@ -0,0 +1,97 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ShowKbCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@showkb ."); + interpreter.runCommand(command); + + StringWriter anotherWriter = new StringWriter(); + interpreter.getKnowledgeBase().writeKnowledgeBase(anotherWriter); + + assertEquals("showkb", command.getName()); + assertEquals(0, command.getArguments().size()); + assertEquals(writer.toString(), anotherWriter.toString()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@showkb p(?X) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void ioError_fails() throws ParsingException, CommandExecutionException, IOException { + Writer writer = Mockito.mock(Writer.class); + 
Mockito.doThrow(IOException.class).when(writer).write(Mockito.anyString()); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@showkb ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ShowKbCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ShowKbCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 0e367daf33465e8aa2aa1dd4b8a20409f1de1db2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 21:37:27 +0200 Subject: [PATCH 0973/1255] yet more tests --- .../rulewerk/commands/Interpreter.java | 19 ++++ .../commands/LoadCommandInterpreter.java | 8 +- .../rulewerk/commands/InterpreterTest.java | 36 ++++++++ .../SetPrefixCommandInterpreterTest.java | 91 +++++++++++++++++++ 4 files changed, 150 insertions(+), 4 deletions(-) create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index d6dc0fecd..d75e8e235 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,6 +1,7 @@ package org.semanticweb.rulewerk.commands; import java.io.ByteArrayInputStream; +import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.InputStream; @@ -208,8 +209,26 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); } + /** + * Returns a Writer to write to the specified file. + * + * @param fileName + * @return + * @throws FileNotFoundException + */ public Writer getFileWriter(String fileName) throws FileNotFoundException { return new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8); } + /** + * Returns an InputStream to read from the specified file. + * + * @param fileName + * @return + * @throws FileNotFoundException + */ + public InputStream getFileInputStream(String fileName) throws FileNotFoundException { + return new FileInputStream(fileName); + } + } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index feeea25c2..b8b74b0b8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -20,8 +20,8 @@ * #L% */ -import java.io.FileInputStream; import java.io.FileNotFoundException; +import java.io.InputStream; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.parser.ParsingException; @@ -35,12 +35,12 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String fileName = Interpreter.extractStringArgument(command, 0, "filename"); try { - FileInputStream fileInputStream = new FileInputStream(fileName); - 
RuleParser.parseInto(interpreter.getKnowledgeBase(), fileInputStream); + InputStream inputStream = interpreter.getFileInputStream(fileName); + RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); } catch (FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { - interpreter.printNormal("Error parsing file: " + e.getMessage() + "\n"); + throw new CommandExecutionException("Error parsing file: " + e.getMessage(), e); } } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java index ffe2c5380..74e517e33 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java @@ -27,6 +27,7 @@ import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -100,4 +101,39 @@ public void malformedCommand_fails() throws ParsingException { interpreter.parseCommand("malformed ."); } + @Test + public void prefixesAreUsed_succeeds() throws ParsingException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + + Command command = interpreter.parseCommand("@somecommand eg:test ."); + + assertEquals(1, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertEquals("http://example.org/test", command.getArguments().get(0).fromTerm().get().getName()); + } + + @Test + public void 
print_succeeds() { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = Mockito.spy(new SimpleStyledPrinter(writer)); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + Reasoner reasoner = Mockito.mock(Reasoner.class); + Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); + + interpreter.printCode("Code"); + interpreter.printNormal("Normal"); + interpreter.printEmph("Emph"); + interpreter.printSection("Section"); + interpreter.printImportant("Important"); + + Mockito.verify(printer).printCode("Code"); + Mockito.verify(printer).printNormal("Normal"); + Mockito.verify(printer).printEmph("Emph"); + Mockito.verify(printer).printSection("Section"); + Mockito.verify(printer).printImportant("Important"); + + } + } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java new file mode 100644 index 000000000..9190c8407 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java @@ -0,0 +1,91 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class SetPrefixCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix eg: ."); + interpreter.runCommand(command); + + assertEquals("setprefix", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri("eg:")); + } + + @Test(expected = CommandExecutionException.class) + public void wrongFirstArgument_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongSecondArgument_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix pre: 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = 
InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@setprefix ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new SetPrefixCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new SetPrefixCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 308009586877611b4d9d6fa9ede3181a83b26c94 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 21:42:36 +0200 Subject: [PATCH 0974/1255] include commands in coverage --- coverage/pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/coverage/pom.xml b/coverage/pom.xml index b65b563fa..76ca8882d 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -44,6 +44,11 @@ rulewerk-parser ${project.version} + + ${project.groupId} + rulewerk-commands + ${project.version} + ${project.groupId} rulewerk-client From dcad863218eb750c3de89e0f98f15dd1670ca2fa Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:08:28 +0200 Subject: [PATCH 0975/1255] update documentation files --- README.md | 5 +++-- RELEASE-NOTES.md | 10 ++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 47b38c0f1..5d183be1f 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ The current release of Rulewerk is version 0.6.0. The easiest way of using the l Previous to version `0.6.0`, *rulewerk* project name was *vlog4j*. Older versions released under name *vlog4j* have `org.semanticweb.vlog4j` and `vlog4j-core`, the latest version being version `0.5.0`. 
-You need to use Java 1.8 or above. Available modules include: +You need to use Java 1.8 or above. Available source modules include: * **rulewerk-core**: essential data models for rules and facts, and essential reasoner functionality * **rulewerk-parser**: support for processing knowledge bases in [Rulewerk syntax](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar) @@ -30,9 +30,10 @@ You need to use Java 1.8 or above. Available modules include: * **rulewerk-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) * **rulewerk-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API * **rulewerk-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/rulewerk/wiki/Standalone-client) for Rulewerk. +* **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). 
In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog-base. * Run ```mvn install``` to test if the setup works diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 79f9edb8a..71a54cce9 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -5,11 +5,21 @@ Rulewerk v0.7.0 --------------- New features: +* New interactive Rulewerk shell for rule reasoning from the command line client * Significant speedup in iterating over query results +* New class `LiteralQueryResultPrinter` for pretty-printing query results Other improvements: +* Improved serialization of knowledge bases (using namespaces) +* Simple (non-IRI, namespace-less) predicate names can now include - and _ * InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where two or more edges are the same. + +Breaking changes: +* The `Serializer` class in the core package has been replaced by a new implementation + with a completely different interface. +* The methods `getSerialization` that were present in most syntax objects have been removed. Use `toString()` instead for simple serializations, or invoke a custom Serializer. +* The `DataSource` interface requires a new method to be implemented. 
Rulewerk v0.6.0 --------------- From 0166159718db10f9a5605ca61cefe56b8cb762c0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:10:31 +0200 Subject: [PATCH 0976/1255] remove useless methods the command parsing method of Interpreter is usually preferable --- .../org/semanticweb/rulewerk/parser/RuleParser.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 146fa1085..c9a00c103 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -26,7 +26,6 @@ import java.util.List; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Entity; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -254,15 +253,6 @@ public static DataSourceDeclaration parseDataSourceDeclaration(final String inpu return parseDataSourceDeclaration(input, null); } - public static Command parseCommand(final String input, ParserConfiguration parserConfiguration) - throws ParsingException { - return parseSyntaxFragment(input, JavaCCParser::command, "command", parserConfiguration); - } - - public static Command parseCommand(final String input) throws ParsingException { - return parseCommand(input, null); - } - static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException { try { parser.parse(); From 73acbe6e227ed1876b6a7b315a45ec73a1caf630 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:22:21 +0200 Subject: [PATCH 0977/1255] display KB when starting --- .../examples/CompareWikidataDBpedia.java | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 
deletions(-) diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index d3249c93d..18a01975f 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -70,28 +70,30 @@ public static void main(final String[] args) throws ParsingException, IOExceptio // Configure the SPARQL data sources and some rules to analyse results: final String rules = "" // - + "@prefix wdqs: ." // - + "@prefix dbp: ." // - + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // - + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "@prefix wdqs: .\n" // + + "@prefix dbp: .\n" // + + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') .\n" // + + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') .\n" // + "% Rules:\n" // - + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // - + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)." // - + "result(?Wikipage) :- inWd(?Wikipage)." // - + "result(?Wikipage) :- inDbp(?Wikipage)." // - + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage)." - + "dbpOnly(?Wikipage) :- inDbp(?Wikipage), ~inWd(?Wikipage)." - + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage)." 
+ ""; // + + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage).\n" // + + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage).\n" // + + "result(?Wikipage) :- inWd(?Wikipage).\n" // + + "result(?Wikipage) :- inDbp(?Wikipage).\n" // + + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage).\n" + + "dbpOnly(?Wikipage) :- inDbp(?Wikipage), ~inWd(?Wikipage).\n" + + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage).\n"; // + + System.out.println("Knowledge base used in this example:\n\n" + rules); final KnowledgeBase kb = RuleParser.parse(rules); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) + final long resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) .getCount(); - final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getCount(); - final double dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getCount(); + final long wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getCount(); + final long dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getCount(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); From 972d0894caa013ded123aee5a781bf8e2f7f9036 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:22:43 +0200 Subject: [PATCH 0978/1255] use new result printing facilities --- .../rulewerk/examples/ExamplesUtils.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index 87745d975..8d18a35f3 100644 --- 
a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -21,6 +21,7 @@ */ import java.io.IOException; +import java.io.OutputStreamWriter; import java.util.ArrayList; import java.util.List; @@ -34,6 +35,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; @@ -88,10 +90,17 @@ public static void configureLogging() { */ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { System.out.println("Answers to query " + queryAtom + " :"); + OutputStreamWriter writer = new OutputStreamWriter(System.out); + LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(queryAtom, writer, + reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()); try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { - answers.forEachRemaining(answer -> System.out.println(" - " + answer)); - + while (answers.hasNext()) { + printer.write(answers.next()); + writer.flush(); + } System.out.println("Query answers are: " + answers.getCorrectness()); + } catch (IOException e) { + throw new RuntimeException(e); } System.out.println(); } From 8891a295e3fda7eb46a26d9d9e7047eef4db94c2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Sun, 23 Aug 2020 22:42:54 +0200 Subject: [PATCH 0979/1255] comment out unused code --- .../rulewerk/core/reasoner/Timer.java | 630 +++++++++--------- 1 file changed, 315 insertions(+), 315 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java index 9b555ede2..1f41efa6e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java @@ -22,22 +22,22 @@ import java.lang.management.ManagementFactory; import java.lang.management.ThreadMXBean; -import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * Class for keeping CPU and system times. The class has a number of features * that can be used to measure and aggregate times across many threads and many * methods. * + * @implNote This file originates from the ELK Reasoner, where more extensive thread-aware + * timing was required. The file contains commented out functions from that source that + * could be used to activate those features here. + * * @author Markus Kroetzsch */ public class Timer { - private static Logger LOGGER = LoggerFactory.getLogger(Timer.class); + //private static Logger LOGGER = LoggerFactory.getLogger(Timer.class); /** Flag for indicating that no times should be taken (just count runs). */ public static final int RECORD_NONE = 0x00000000; @@ -223,316 +223,316 @@ public synchronized long stop() { return totalTime; } - /** - * Print logging information for the timer. The log only shows the recorded time - * of the completed start-stop cycles. If the timer is still running, then it - * will not be stopped to add the currently measured time to the output but a - * warning will be logged. 
- * - */ - public void log() { - if (LOGGER.isInfoEnabled()) { - String timerLabel; - if (threadId != 0) { - timerLabel = name + " (thread " + threadId + ")"; - } else if (threadCount > 1) { - timerLabel = name + " (over " + threadCount + " threads)"; - } else { - timerLabel = name; - } - - if (todoFlags == RECORD_NONE) { - LOGGER.info("Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); - } else { - String labels = ""; - String values = ""; - String separator; - - if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { - labels += "CPU"; - values += totalCpuTime / 1000000; - separator = "/"; - } else { - separator = ""; - } - if ((todoFlags & RECORD_WALLTIME) != 0) { - labels += separator + "Wall"; - values += separator + totalWallTime / 1000000; - } - if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { - labels += "/CPU avg"; - values += "/" + (float) (totalCpuTime) / measurements / 1000000; - } - if ((todoFlags & RECORD_WALLTIME) != 0) { - labels += "/Wall avg"; - values += "/" + (float) (totalWallTime) / measurements / 1000000; - } - if (threadCount > 1) { - if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { - labels += "/CPU per thread"; - values += "/" + (float) (totalCpuTime) / threadCount / 1000000; - } - if ((todoFlags & RECORD_WALLTIME) != 0) { - labels += "/Wall per thread"; - values += "/" + (float) (totalWallTime) / threadCount / 1000000; - } - } - - LOGGER.info( - "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); - } - - if (isRunning) { - LOGGER.warn("Timer " + timerLabel + " logged while it was still running"); - } - } - } - - /** - * Start a timer of the given string name for all todos and the current thread. - * If no such timer exists yet, then it will be newly created. 
- * - * @param timerName the name of the timer - */ - public static void startNamedTimer(String timerName) { - getNamedTimer(timerName).start(); - } - - /** - * Start a timer of the given string name for the current thread. If no such - * timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - */ - public static void startNamedTimer(String timerName, int todoFlags) { - getNamedTimer(timerName, todoFlags).start(); - } - - /** - * Start a timer of the given string name for the current thread. If no such - * timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - */ - public static void startNamedTimer(String timerName, int todoFlags, long threadId) { - getNamedTimer(timerName, todoFlags, threadId).start(); - } - - /** - * Stop a timer of the given string name for all todos and the current thread. - * If no such timer exists, -1 will be returned. Otherwise the return value is - * the CPU time that was measured. - * - * @param timerName the name of the timer - * @return CPU time if timer existed and was running, and -1 otherwise - */ - public static long stopNamedTimer(String timerName) { - return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); - } - - /** - * Stop a timer of the given string name for the current thread. If no such - * timer exists, -1 will be returned. Otherwise the return value is the CPU time - * that was measured. - * - * @param timerName the name of the timer - * @param todoFlags - * @return CPU time if timer existed and was running, and -1 otherwise - */ - public static long stopNamedTimer(String timerName, int todoFlags) { - return stopNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); - } - - /** - * Stop a timer of the given string name for the given thread. 
If no such timer - * exists, -1 will be returned. Otherwise the return value is the CPU time that - * was measured. - * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - * @return CPU time if timer existed and was running, and -1 otherwise - */ - public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { - Timer key = new Timer(timerName, todoFlags, threadId); - if (registeredTimers.containsKey(key)) { - return registeredTimers.get(key).stop(); - } else { - return -1; - } - } - - /** - * Reset a timer of the given string name for all todos and the current thread. - * If no such timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - */ - public static void resetNamedTimer(String timerName) { - getNamedTimer(timerName).reset(); - } - - /** - * Reset a timer of the given string name for the current thread. If no such - * timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - */ - public static void resetNamedTimer(String timerName, int todoFlags) { - getNamedTimer(timerName, todoFlags).reset(); - } - - /** - * Reset a timer of the given string name for the given thread. If no such timer - * exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - */ - public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { - getNamedTimer(timerName, todoFlags, threadId).reset(); - } - - /** - * Get a timer of the given string name that takes all possible times (todos) - * for the current thread. If no such timer exists yet, then it will be newly - * created. 
- * - * @param timerName the name of the timer - * @return timer - */ - public static Timer getNamedTimer(String timerName) { - return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); - } - - /** - * Returns all registered timers - * - * @return an iterable collection of named timers - */ - public static Iterable getNamedTimers() { - return registeredTimers.keySet(); - } - - /** - * Get a timer of the given string name and todos for the current thread. If no - * such timer exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - * @return timer - */ - public static Timer getNamedTimer(String timerName, int todoFlags) { - return getNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); - } - - /** - * Get a timer of the given string name for the given thread. If no such timer - * exists yet, then it will be newly created. - * - * @param timerName the name of the timer - * @param todoFlags - * @param threadId of the thread to track, or 0 if only system clock should be - * tracked - * @return timer - */ - public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { - Timer key = new Timer(timerName, todoFlags, threadId); - Timer previous = registeredTimers.putIfAbsent(key, key); - if (previous != null) { - return previous; - } - // else - return key; - } - - /** - * Collect the total times measured by all known named timers of the given name. 
- * - * @param timerName - * @return timer - */ - public static Timer getNamedTotalTimer(String timerName) { - long totalCpuTime = 0; - long totalSystemTime = 0; - int measurements = 0; - int threadCount = 0; - int todoFlags = RECORD_NONE; - Timer previousTimer = null; - for (Map.Entry entry : registeredTimers.entrySet()) { - if (entry.getValue().name.equals(timerName)) { - previousTimer = entry.getValue(); - threadCount += 1; - totalCpuTime += previousTimer.totalCpuTime; - totalSystemTime += previousTimer.totalWallTime; - measurements += previousTimer.measurements; - todoFlags |= previousTimer.todoFlags; - } - } - - if (threadCount == 1) { - return previousTimer; - } else { - Timer result = new Timer(timerName, todoFlags, 0); - result.totalCpuTime = totalCpuTime; - result.totalWallTime = totalSystemTime; - result.measurements = measurements; - result.threadCount = threadCount; - return result; - } - } - - public static void logAllNamedTimers(String timerName) { - for (Map.Entry entry : registeredTimers.entrySet()) { - if (entry.getValue().name.equals(timerName)) { - entry.getValue().log(); - } - } - } - - @Override - public int hashCode() { - // Jenkins hash, see http://www.burtleburtle.net/bob/hash/doobs.html and also - // http://en.wikipedia.org/wiki/Jenkins_hash_function. 
- int hash = name.hashCode(); - hash += (hash << 10); - hash ^= (hash >> 6); - hash += Long.valueOf(threadId).hashCode(); - hash += (hash << 10); - hash ^= (hash >> 6); - hash += Integer.valueOf(todoFlags).hashCode(); - hash += (hash << 10); - hash ^= (hash >> 6); - - hash += (hash << 3); - hash ^= (hash >> 11); - hash += (hash << 15); - return hash; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } else if (obj == null) { - return false; - } else if (getClass() != obj.getClass()) { - return false; - } else if (threadId == ((Timer) obj).threadId && todoFlags == ((Timer) obj).todoFlags - && name.equals(((Timer) obj).name)) { - return true; - } else { - return false; - } - } +// /** +// * Print logging information for the timer. The log only shows the recorded time +// * of the completed start-stop cycles. If the timer is still running, then it +// * will not be stopped to add the currently measured time to the output but a +// * warning will be logged. 
+// * +// */ +// public void log() { +// if (LOGGER.isInfoEnabled()) { +// String timerLabel; +// if (threadId != 0) { +// timerLabel = name + " (thread " + threadId + ")"; +// } else if (threadCount > 1) { +// timerLabel = name + " (over " + threadCount + " threads)"; +// } else { +// timerLabel = name; +// } +// +// if (todoFlags == RECORD_NONE) { +// LOGGER.info("Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); +// } else { +// String labels = ""; +// String values = ""; +// String separator; +// +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "CPU"; +// values += totalCpuTime / 1000000; +// separator = "/"; +// } else { +// separator = ""; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += separator + "Wall"; +// values += separator + totalWallTime / 1000000; +// } +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "/CPU avg"; +// values += "/" + (float) (totalCpuTime) / measurements / 1000000; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += "/Wall avg"; +// values += "/" + (float) (totalWallTime) / measurements / 1000000; +// } +// if (threadCount > 1) { +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "/CPU per thread"; +// values += "/" + (float) (totalCpuTime) / threadCount / 1000000; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += "/Wall per thread"; +// values += "/" + (float) (totalWallTime) / threadCount / 1000000; +// } +// } +// +// LOGGER.info( +// "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); +// } +// +// if (isRunning) { +// LOGGER.warn("Timer " + timerLabel + " logged while it was still running"); +// } +// } +// } +// +// /** +// * Start a timer of the given string name for all todos and the current thread. +// * If no such timer exists yet, then it will be newly created. 
+// * +// * @param timerName the name of the timer +// */ +// public static void startNamedTimer(String timerName) { +// getNamedTimer(timerName).start(); +// } +// +// /** +// * Start a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// */ +// public static void startNamedTimer(String timerName, int todoFlags) { +// getNamedTimer(timerName, todoFlags).start(); +// } +// +// /** +// * Start a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// */ +// public static void startNamedTimer(String timerName, int todoFlags, long threadId) { +// getNamedTimer(timerName, todoFlags, threadId).start(); +// } +// +// /** +// * Stop a timer of the given string name for all todos and the current thread. +// * If no such timer exists, -1 will be returned. Otherwise the return value is +// * the CPU time that was measured. +// * +// * @param timerName the name of the timer +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName) { +// return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); +// } +// +// /** +// * Stop a timer of the given string name for the current thread. If no such +// * timer exists, -1 will be returned. Otherwise the return value is the CPU time +// * that was measured. 
+// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName, int todoFlags) { +// return stopNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); +// } +// +// /** +// * Stop a timer of the given string name for the given thread. If no such timer +// * exists, -1 will be returned. Otherwise the return value is the CPU time that +// * was measured. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { +// Timer key = new Timer(timerName, todoFlags, threadId); +// if (registeredTimers.containsKey(key)) { +// return registeredTimers.get(key).stop(); +// } else { +// return -1; +// } +// } +// +// /** +// * Reset a timer of the given string name for all todos and the current thread. +// * If no such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// */ +// public static void resetNamedTimer(String timerName) { +// getNamedTimer(timerName).reset(); +// } +// +// /** +// * Reset a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// */ +// public static void resetNamedTimer(String timerName, int todoFlags) { +// getNamedTimer(timerName, todoFlags).reset(); +// } +// +// /** +// * Reset a timer of the given string name for the given thread. If no such timer +// * exists yet, then it will be newly created. 
+// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// */ +// public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { +// getNamedTimer(timerName, todoFlags, threadId).reset(); +// } +// +// /** +// * Get a timer of the given string name that takes all possible times (todos) +// * for the current thread. If no such timer exists yet, then it will be newly +// * created. +// * +// * @param timerName the name of the timer +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName) { +// return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); +// } +// +// /** +// * Returns all registered timers +// * +// * @return an iterable collection of named timers +// */ +// public static Iterable getNamedTimers() { +// return registeredTimers.keySet(); +// } +// +// /** +// * Get a timer of the given string name and todos for the current thread. If no +// * such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName, int todoFlags) { +// return getNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); +// } +// +// /** +// * Get a timer of the given string name for the given thread. If no such timer +// * exists yet, then it will be newly created. 
+// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { +// Timer key = new Timer(timerName, todoFlags, threadId); +// Timer previous = registeredTimers.putIfAbsent(key, key); +// if (previous != null) { +// return previous; +// } +// // else +// return key; +// } +// +// /** +// * Collect the total times measured by all known named timers of the given name. +// * +// * @param timerName +// * @return timer +// */ +// public static Timer getNamedTotalTimer(String timerName) { +// long totalCpuTime = 0; +// long totalSystemTime = 0; +// int measurements = 0; +// int threadCount = 0; +// int todoFlags = RECORD_NONE; +// Timer previousTimer = null; +// for (Map.Entry entry : registeredTimers.entrySet()) { +// if (entry.getValue().name.equals(timerName)) { +// previousTimer = entry.getValue(); +// threadCount += 1; +// totalCpuTime += previousTimer.totalCpuTime; +// totalSystemTime += previousTimer.totalWallTime; +// measurements += previousTimer.measurements; +// todoFlags |= previousTimer.todoFlags; +// } +// } +// +// if (threadCount == 1) { +// return previousTimer; +// } else { +// Timer result = new Timer(timerName, todoFlags, 0); +// result.totalCpuTime = totalCpuTime; +// result.totalWallTime = totalSystemTime; +// result.measurements = measurements; +// result.threadCount = threadCount; +// return result; +// } +// } +// +// public static void logAllNamedTimers(String timerName) { +// for (Map.Entry entry : registeredTimers.entrySet()) { +// if (entry.getValue().name.equals(timerName)) { +// entry.getValue().log(); +// } +// } +// } +// +// @Override +// public int hashCode() { +// // Jenkins hash, see http://www.burtleburtle.net/bob/hash/doobs.html and also +// // http://en.wikipedia.org/wiki/Jenkins_hash_function. 
+// int hash = name.hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// hash += Long.valueOf(threadId).hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// hash += Integer.valueOf(todoFlags).hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// +// hash += (hash << 3); +// hash ^= (hash >> 11); +// hash += (hash << 15); +// return hash; +// } +// +// @Override +// public boolean equals(Object obj) { +// if (this == obj) { +// return true; +// } else if (obj == null) { +// return false; +// } else if (getClass() != obj.getClass()) { +// return false; +// } else if (threadId == ((Timer) obj).threadId && todoFlags == ((Timer) obj).todoFlags +// && name.equals(((Timer) obj).name)) { +// return true; +// } else { +// return false; +// } +// } protected static long getThreadCpuTime(long threadId) { if (threadId == 0) { // generally invalid From 5c6985e8159eee3a40977321457163244a342d4f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 24 Aug 2020 09:41:05 +0200 Subject: [PATCH 0980/1255] test load command --- .../commands/LoadCommandInterpreterTest.java | 123 ++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java new file mode 100644 index 000000000..5ed5a0f14 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -0,0 +1,123 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class LoadCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("p(a) .".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + + assertEquals("load", command.getName()); + assertEquals(1, 
command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("not parsable".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseFileNotFoundError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void 
wrongArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load p(a) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new LoadCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new LoadCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 6cb4f8d6d4865792a153bcf9a7f9b8f18a272173 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 24 Aug 2020 12:51:41 +0200 Subject: [PATCH 0981/1255] typo --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index a36caba3c..0768042b6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -64,7 +64,7 @@ public void run(final CommandReader commandReader) { } } } - interpreter.printSection("Existing Rulewerk shell ... bye.\n\n"); + interpreter.printSection("Exiting Rulewerk shell ... 
bye.\n\n"); } public void exitShell() { From 42d5fdd3d76181a0247c08ee7cce5da54e9969d0 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 24 Aug 2020 18:32:43 +0200 Subject: [PATCH 0982/1255] Handle exit request from user using CTRL+D --- .../semanticweb/rulewerk/client/shell/CommandReader.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 48a55eeea..18400e686 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.client.shell; +import org.jline.reader.EndOfFileException; + /*- * #%L * Rulewerk Client @@ -61,6 +63,10 @@ public Command readCommand() { } else { return null; // used as empty command } + } catch (final EndOfFileException e) { + // Exit request from user CTRL+D + return ExitCommandInterpreter.EXIT_COMMAND; + } readLine = readLine.trim(); From 55e4b62c263dbd9ab049259791146e7ddb62a36b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 24 Aug 2020 19:10:06 +0200 Subject: [PATCH 0983/1255] complete only command names --- .../rulewerk/client/shell/DefaultConfiguration.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 73477a47b..e573fb54c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -28,6 +28,8 @@ import org.jline.reader.Completer; import org.jline.reader.LineReader; import 
org.jline.reader.LineReaderBuilder; +import org.jline.reader.impl.completer.ArgumentCompleter; +import org.jline.reader.impl.completer.NullCompleter; import org.jline.reader.impl.completer.StringsCompleter; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; @@ -59,14 +61,17 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre return lineReader; } + private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List serializedCommandNames = registeredCommandNames.stream() - .map(commandName -> "@" + commandName) - .collect(Collectors.toList()); - return new StringsCompleter(serializedCommandNames); + .map(commandName -> "@" + commandName).collect(Collectors.toList()); + final Completer commandNamesCompleter = new StringsCompleter(serializedCommandNames); + // do not complete command arguments + return new ArgumentCompleter(commandNamesCompleter, NullCompleter.INSTANCE); } + public static Terminal buildTerminal() throws IOException { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); } From f308c99e98d05932952ea3b3f837794141ae0a2b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 11:45:59 +0200 Subject: [PATCH 0984/1255] add file name completion for @load and @export --- .../client/shell/DefaultConfiguration.java | 32 +++++++++++-------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index e573fb54c..011a32086 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -21,16 +21,17 @@ */ import java.io.IOException; 
+import java.util.ArrayList; import java.util.List; import java.util.Set; -import java.util.stream.Collectors; +import org.jline.builtins.Completers; +import org.jline.builtins.Completers.FileNameCompleter; +import org.jline.builtins.Completers.TreeCompleter; +import org.jline.builtins.Completers.TreeCompleter.Node; import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; -import org.jline.reader.impl.completer.ArgumentCompleter; -import org.jline.reader.impl.completer.NullCompleter; -import org.jline.reader.impl.completer.StringsCompleter; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; @@ -48,8 +49,7 @@ public static PromptProvider buildPromptProvider() { public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) - .appName("Rulewerk Shell") - .completer(buildCompleter(interpreter)) + .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) // .expander(expander()) // .history(buildHistory()) // .highlighter(buildHighlighter()) @@ -61,16 +61,22 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre return lineReader; } - private static Completer buildCompleter(final Interpreter interpreter) { +// @load and @export commands require a file name as argument + final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); + final Set registeredCommandNames = interpreter.getRegisteredCommands(); - final List serializedCommandNames = registeredCommandNames.stream() - .map(commandName -> "@" + commandName).collect(Collectors.toList()); - final Completer commandNamesCompleter = new StringsCompleter(serializedCommandNames); - // do not complete command arguments - return new ArgumentCompleter(commandNamesCompleter, NullCompleter.INSTANCE); - } + final List nodes 
= new ArrayList<>(); + registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { + if (serializedCommandName.equals("@load") || serializedCommandName.equals("@export")) { + nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); + } else { + nodes.add(TreeCompleter.node(serializedCommandName)); + } + }); + return new TreeCompleter(nodes); + } public static Terminal buildTerminal() throws IOException { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); From 1daf9bb7ffb7b316f42da65d2c9086f3bdabe6c3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 13:48:54 +0200 Subject: [PATCH 0985/1255] remove file completer for @export command --- .../semanticweb/rulewerk/client/shell/DefaultConfiguration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 011a32086..e9cf0438f 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -68,7 +68,7 @@ private static Completer buildCompleter(final Interpreter interpreter) { final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List nodes = new ArrayList<>(); registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { - if (serializedCommandName.equals("@load") || serializedCommandName.equals("@export")) { + if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); } else { nodes.add(TreeCompleter.node(serializedCommandName)); From e7b3d7c00d6b577e4d3e7affc7f1c1226588fb86 Mon Sep 17 00:00:00 
2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:22:15 +0200 Subject: [PATCH 0986/1255] typos --- .../rulewerk/commands/AddSourceCommandInterpreter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index 5fe8936cd..8f8a2e4b7 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " []: .\n" + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " : a fact specifying a source declaration\n\n" + "Note that every predicate can have multiple sources.\n"); @@ -87,7 +87,7 @@ static DataSource extractDataSource(PositiveLiteral sourceDeclaration, Interpret return interpreter.getParserConfiguration() .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); } catch (ParsingException e) { - throw new CommandExecutionException("Could not parse source declartion: " + e.getMessage()); + throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage()); } } From 29214ad90705694a13e51391ef6dcfde47ddbf72 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:31:18 +0200 Subject: [PATCH 0987/1255] support trident data sources --- .../DataSourceConfigurationVisitor.java | 20 +++-- .../implementation/TridentDataSource.java | 90 +++++++++++++++++++ .../parser/DefaultParserConfiguration.java | 3 + .../TridentDataSourceDeclarationHandler.java | 43 +++++++++ 
.../VLogDataSourceConfigurationVisitor.java | 26 +++--- 5 files changed, 166 insertions(+), 16 deletions(-) create mode 100644 rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java create mode 100644 rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java index 9ead436c5..91c78b4e1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -32,30 +32,38 @@ public interface DataSourceConfigurationVisitor { /** * Configure the reasoner for a {@link CsvFileDataSource}. * - * @param dataSource the data source to configure. - * @throws IOexception when an IO error occurs during configuration. + * @param dataSource the data source to configure + * @throws IOexception when an IO error occurs during configuration */ public void visit(CsvFileDataSource dataSource) throws IOException; /** * Configure the reasoner for a {@link RdfFileDataSource}. * - * @param dataSource the data source to configure. - * @throws IOexception when an IO error occurs during configuration. + * @param dataSource the data source to configure + * @throws IOexception when an IO error occurs during configuration */ public void visit(RdfFileDataSource dataSource) throws IOException; + /** + * Configure the reasoner for a {@link TridentDataSource}. 
+ * + * @param dataSource the data source to configure + * @throws IOexception when an IO error occurs during configuration + */ + public void visit(TridentDataSource dataSource) throws IOException; + /** * Configure the reasoner for a {@link SparqlQueryResultDataSource}. * - * @param dataSource the data source to configure. + * @param dataSource the data source to configure */ public void visit(SparqlQueryResultDataSource dataSource); /** * Configure the reasoner for a {@link InMemoryDataSource}. * - * @param dataSource the data source to configure. + * @param dataSource the data source to configure */ public void visit(InMemoryDataSource dataSource); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java new file mode 100644 index 000000000..14245678a --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -0,0 +1,90 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +/** + * Data source for loading data from a database created with the + * Trident RDF indexing and + * storage utility. This is the recommended data source for large RDF + * datasets in the VLog reasoner. Trident databases are generated from RDF input + * files in a batch process using the Trident tool. + * + * @author Markus Kroetzsch + * + */ +public class TridentDataSource implements ReasonerDataSource { + + /** + * The name of the predicate used for declarations of data sources of this type. + */ + public static final String declarationPredicateName = "trident"; + + final String filePath; + + public TridentDataSource(final String filePath) { + this.filePath = filePath; + } + + public String getPath() { + return this.filePath; + } + + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate(declarationPredicateName, 1); + return Expressions.makeFact(predicate, + Expressions.makeDatatypeConstant(filePath, PrefixDeclarationRegistry.XSD_STRING)); + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { + visitor.visit(this); + + } + + @Override + public int hashCode() { + return this.filePath.hashCode(); + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof TridentDataSource)) { + return false; + } + final TridentDataSource other = (TridentDataSource) obj; + return this.filePath.equals(other.getPath()); + } + +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index d8ce99ddb..80050e49f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -3,6 +3,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; /*- * #%L @@ -27,6 +28,7 @@ import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.datasources.TridentDataSourceDeclarationHandler; import org.semanticweb.rulewerk.parser.directives.ImportFileDirectiveHandler; import org.semanticweb.rulewerk.parser.directives.ImportFileRelativeDirectiveHandler; @@ -50,6 +52,7 @@ private void registerDefaultDataSources() { registerDataSource(RdfFileDataSource.declarationPredicateName, new RdfFileDataSourceDeclarationHandler()); registerDataSource(SparqlQueryResultDataSource.declarationPredicateName, new SparqlQueryResultDataSourceDeclarationHandler()); + registerDataSource(TridentDataSource.declarationPredicateName, new TridentDataSourceDeclarationHandler()); } private void registerDefaultDirectives() { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java new file mode 100644 index 000000000..830d97c59 --- /dev/null +++ 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java @@ -0,0 +1,43 @@ +package org.semanticweb.rulewerk.parser.datasources; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * Handler for parsing {@link TridentDataSource} declarations + * + * @author Markus Kroetzsch + */ +public class TridentDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); + String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "path to Trident database"); + + return new TridentDataSource(fileName); + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java index f5396dc24..c2bc52c08 100644 --- 
a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -29,6 +29,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; public class VLogDataSourceConfigurationVisitor implements DataSourceConfigurationVisitor { @@ -37,6 +38,7 @@ public class VLogDataSourceConfigurationVisitor implements DataSourceConfigurati private static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; private static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; private final static String FILE_DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; + private final static String TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE = "Trident"; private static final String SPARQL_DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; public String getConfigString() { @@ -44,11 +46,9 @@ public String getConfigString() { } protected void setFileConfigString(FileDataSource dataSource) throws IOException { - this.configString = - PREDICATE_NAME_CONFIG_LINE + - DATASOURCE_TYPE_CONFIG_PARAM + "=" + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + getDirCanonicalPath(dataSource) + "\n" + - "EDB%1$d_param1=" + getFileNameWithoutExtension(dataSource) + "\n"; + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + "EDB%1$d_param0=" + getDirCanonicalPath(dataSource) + "\n" + + "EDB%1$d_param1=" + getFileNameWithoutExtension(dataSource) + "\n"; } String getDirCanonicalPath(FileDataSource dataSource) 
throws IOException { @@ -72,11 +72,17 @@ public void visit(RdfFileDataSource dataSource) throws IOException { @Override public void visit(SparqlQueryResultDataSource dataSource) { - this.configString = - PREDICATE_NAME_CONFIG_LINE + - DATASOURCE_TYPE_CONFIG_PARAM + "=" + SPARQL_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + - "EDB%1$d_param0=" + dataSource.getEndpoint() + "\n" + "EDB%1$d_param1=" + dataSource.getQueryVariables() + "\n" + - "EDB%1$d_param2=" + dataSource.getQueryBody() + "\n"; + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + + SPARQL_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + "EDB%1$d_param0=" + dataSource.getEndpoint() + "\n" + + "EDB%1$d_param1=" + dataSource.getQueryVariables() + "\n" + "EDB%1$d_param2=" + + dataSource.getQueryBody() + "\n"; + } + + @Override + public void visit(TridentDataSource dataSource) { + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" // + + TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE + "\n" // + + "EDB%1$d_param0=" + dataSource.getPath() + "\n"; } @Override From 45c6a14b9aa4b3cd256bc77a1e1232d2ab1853ab Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:45:42 +0200 Subject: [PATCH 0988/1255] test Trident source --- .../implementation/TridentDataSourceTest.java | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java new file mode 100644 index 000000000..faf6d640b --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java @@ -0,0 +1,81 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class TridentDataSourceTest { + + @Test(expected = NullPointerException.class) + public void nullFile_fails() throws IOException { + new TridentDataSource(null); + } + + @Test + public void get_succeeds() throws IOException { + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + assertEquals("trident/path", tridentDataSource.getPath()); + } + + @Test + public void getDeclarationFact_succeeds() throws IOException { + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + Fact fact = tridentDataSource.getDeclarationFact(); + assertEquals(TridentDataSource.declarationPredicateName, fact.getPredicate().getName()); + assertEquals(1, fact.getPredicate().getArity()); + assertEquals(Expressions.makeDatatypeConstant("trident/path", PrefixDeclarationRegistry.XSD_STRING), + fact.getArguments().get(0)); + } + + @Test + public 
void visit_succeeds() throws IOException { + final DataSourceConfigurationVisitor visitor = Mockito.spy(DataSourceConfigurationVisitor.class); + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + + tridentDataSource.accept(visitor); + + Mockito.verify(visitor).visit(tridentDataSource); + } + + @Test + public void hashEquals_succeed() throws IOException { + final TridentDataSource tridentDataSource1 = new TridentDataSource("trident/path"); + final TridentDataSource tridentDataSource2 = new TridentDataSource("trident/path"); + final TridentDataSource tridentDataSource3 = new TridentDataSource("trident/anotherpath"); + + assertEquals(tridentDataSource1, tridentDataSource2); + assertEquals(tridentDataSource1.hashCode(), tridentDataSource2.hashCode()); + assertNotEquals(tridentDataSource1, tridentDataSource3); + assertEquals(tridentDataSource1, tridentDataSource1); + assertFalse(tridentDataSource1.equals(null)); + assertFalse(tridentDataSource1.equals("trident/path")); + } +} From 91d9363513bddef9927b467c4cb551d7c1d1006f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:52:06 +0200 Subject: [PATCH 0989/1255] validate not null --- .../core/reasoner/implementation/TridentDataSource.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 14245678a..54cfba4a8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -22,6 +22,7 @@ import java.io.IOException; +import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Predicate; import 
org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; @@ -47,6 +48,7 @@ public class TridentDataSource implements ReasonerDataSource { final String filePath; public TridentDataSource(final String filePath) { + Validate.notBlank(filePath, "Path to Trident database cannot be blank!"); this.filePath = filePath; } @@ -64,7 +66,6 @@ public Fact getDeclarationFact() { @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); - } @Override From 877ba5deee1b214f0ea8c2570846a29cdb420b16 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:52:18 +0200 Subject: [PATCH 0990/1255] test trident source parsing --- .../parser/RuleParserDataSourceTest.java | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 1a376bc3e..8e0c0abb4 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -31,6 +31,7 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; +import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; @@ -41,6 +42,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; import org.semanticweb.rulewerk.parser.ParserConfiguration; import 
org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; @@ -50,6 +52,7 @@ public class RuleParserDataSourceTest { private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; private static final String EXAMPLE_CSV_FILE_PATH = "src/main/data/input/example.csv"; private static final String WIKIDATA_SPARQL_ENDPOINT_URI = "https://query.wikidata.org/sparql"; + private static final String EXAMPLE_TRIDENT_PATH = "src/main/data/trident"; @Test public void testCsvSource() throws ParsingException, IOException { @@ -193,4 +196,25 @@ public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws Pa RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } + @Test + public void testTridentSource_succeeds() throws ParsingException, IOException { + String input = "@source p[2] : trident(\"" + EXAMPLE_TRIDENT_PATH + "\") ."; + DataSource parsed = RuleParser.parseDataSourceDeclaration(input).getDataSource(); + TridentDataSource expected = new TridentDataSource(EXAMPLE_TRIDENT_PATH); + + assertEquals(expected, parsed); + } + + @Test(expected = ParsingException.class) + public void testTridentSourcewrongParameterCount_fails() throws ParsingException, IOException { + String input = "@source p[2] : trident(\"" + EXAMPLE_TRIDENT_PATH + "\", 42) ."; + RuleParser.parseDataSourceDeclaration(input).getDataSource(); + } + + @Test(expected = ParsingException.class) + public void testTridentSourcewrongParameterType_fails() throws ParsingException, IOException { + String input = "@source p[2] : trident(42) ."; + RuleParser.parseDataSourceDeclaration(input).getDataSource(); + } + } From 4218319d6e026ba29b99342eb11823559cb4a94e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 14:54:52 +0200 Subject: [PATCH 0991/1255] mention trident support --- RELEASE-NOTES.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/RELEASE-NOTES.md 
b/RELEASE-NOTES.md index 71a54cce9..ec4e55973 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -7,6 +7,8 @@ Rulewerk v0.7.0 New features: * New interactive Rulewerk shell for rule reasoning from the command line client * Significant speedup in iterating over query results +* Support for using data from a Trident database, the recommended data source for large + RDF graphs in VLog * New class `LiteralQueryResultPrinter` for pretty-printing query results Other improvements: From 9e352ee772b7c9d6d192ace273f780a50106871e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 16:19:52 +0200 Subject: [PATCH 0992/1255] Tests for trident source The tests are currently disabled due to VLog bugs but can be enabled when fixed --- .../input/tridentTernaryFacts/_sample/kbstats | Bin 0 -> 140 bytes .../input/tridentTernaryFacts/_sample/p0/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p0/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p1/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p1/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p2/0 | 1 + .../tridentTernaryFacts/_sample/p2/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p3/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p3/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p4/0 | Bin 0 -> 2 bytes .../tridentTernaryFacts/_sample/p4/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/p5/0 | 1 + .../tridentTernaryFacts/_sample/p5/0.idx | Bin 0 -> 19 bytes .../input/tridentTernaryFacts/_sample/tree/0 | Bin 0 -> 12000 bytes .../tridentTernaryFacts/_sample/tree/idx | Bin 0 -> 34 bytes .../tridentTernaryFacts/_sample/tree/tree | Bin 0 -> 16 bytes .../data/input/tridentTernaryFacts/dict/0/0 | Bin 0 -> 23 bytes .../data/input/tridentTernaryFacts/dict/0/idx | Bin 0 -> 34 bytes .../data/input/tridentTernaryFacts/dict/0/sb | Bin 0 -> 59 bytes .../input/tridentTernaryFacts/dict/0/sb.idx | Bin 0 -> 16 bytes 
.../input/tridentTernaryFacts/dict/0/tree | Bin 0 -> 16 bytes .../input/tridentTernaryFacts/invdict/0/0 | Bin 0 -> 18 bytes .../input/tridentTernaryFacts/invdict/0/idx | Bin 0 -> 34 bytes .../input/tridentTernaryFacts/invdict/0/tree | Bin 0 -> 16 bytes .../data/input/tridentTernaryFacts/kbstats | Bin 0 -> 140 bytes .../test/data/input/tridentTernaryFacts/p0/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p0/0.idx | Bin 0 -> 19 bytes .../test/data/input/tridentTernaryFacts/p1/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p1/0.idx | Bin 0 -> 30 bytes .../test/data/input/tridentTernaryFacts/p2/0 | 1 + .../data/input/tridentTernaryFacts/p2/0.idx | Bin 0 -> 30 bytes .../test/data/input/tridentTernaryFacts/p3/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p3/0.idx | Bin 0 -> 19 bytes .../test/data/input/tridentTernaryFacts/p4/0 | Bin 0 -> 4 bytes .../data/input/tridentTernaryFacts/p4/0.idx | Bin 0 -> 30 bytes .../test/data/input/tridentTernaryFacts/p5/0 | 1 + .../data/input/tridentTernaryFacts/p5/0.idx | Bin 0 -> 30 bytes .../data/input/tridentTernaryFacts/tree/0 | Bin 0 -> 12000 bytes .../data/input/tridentTernaryFacts/tree/idx | Bin 0 -> 34 bytes .../data/input/tridentTernaryFacts/tree/tree | Bin 0 -> 16 bytes .../vlog/VLogReasonerTridentInput.java | 98 ++++++++++++++++++ 41 files changed, 102 insertions(+) create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx create mode 100644 
rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/tree create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 create mode 100644 
rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx create mode 100644 rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats new file mode 100644 index 0000000000000000000000000000000000000000..4fecafaf21427268bc0b14646a99f99536b7edb8 GIT binary patch literal 140 YcmZQz00Tw{1t*dDY;aKqm@sW<00vY56#xJL literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 new file mode 100644 index 0000000000000000000000000000000000000000..d6db588e88905ed0aaaf65a947716182301341c9 GIT binary patch literal 2 JcmZQz0RR9700jU5 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff --git 
a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 new file mode 100644 index 0000000000000000000000000000000000000000..a903574af00b573ad9bdb2bccf8d93ed00c675de GIT binary patch literal 2 JcmZQz1^@sB00aO4 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..ed5f2a0b2a5a51c51de25b0024a87ed0a5c9d3bb GIT binary patch literal 19 QcmZQz00Tw{1teJ-005@|h5!Hn literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 new file mode 100644 index 000000000..6bebb85a7 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..587e420f8241bce503556ed9ef553e2cb808626b GIT binary patch literal 19 PcmZQz00Tw{1tJ>&0G|MY literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 new file mode 100644 index 0000000000000000000000000000000000000000..d825e1ad776558a390c09389f5b2ce26cd573be3 GIT binary patch literal 2 JcmZQ!0000A00jU5 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff 
--git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 new file mode 100644 index 0000000000000000000000000000000000000000..15294a501aa6e73201b85ff460b2fcf0adb11e48 GIT binary patch literal 2 JcmZQ(0000800aO4 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..ed5f2a0b2a5a51c51de25b0024a87ed0a5c9d3bb GIT binary patch literal 19 QcmZQz00Tw{1teJ-005@|h5!Hn literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 new file mode 100644 index 000000000..938838043 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..587e420f8241bce503556ed9ef553e2cb808626b GIT binary patch literal 19 PcmZQz00Tw{1tJ>&0G|MY literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 new file mode 100644 index 0000000000000000000000000000000000000000..15b24cdc32e3343b105e3df38b18498060b9a638 GIT binary patch literal 12000 zcmeIuF$%yS5Cp&#Z3HVxnKBQ})04)|(sB-#gJr8pQnRkMr_Z#ETz_?!eF6js z5FkK+009C72oNAZfB*pk1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNA}O5g*5 Chy#!S literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx new file mode 100644 index 
0000000000000000000000000000000000000000..6fce09e5e30ac49abd9a3dbcd05ac96d0b976820 GIT binary patch literal 34 XcmZQzU|?imVBiE17{G*qLGJ+o1C9Ys literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree new file mode 100644 index 0000000000000000000000000000000000000000..c9426b2a5e5c2105de52e6196409e5222396a68b GIT binary patch literal 16 LcmZQzKmm*Z01*HJ literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 new file mode 100644 index 0000000000000000000000000000000000000000..52c0cfce6fc7cd1c7f89c808e71a0ef13120e463 GIT binary patch literal 23 ZcmZQzU|?l{038NZMmY%v7G@?!1^@-d0GI#( literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx new file mode 100644 index 0000000000000000000000000000000000000000..1e2cec818bf1b680b251552cbab62ce7de0d0b9a GIT binary patch literal 34 WcmZQzU|?imVBiE17(g5-A`Son*Z?{J literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb new file mode 100644 index 0000000000000000000000000000000000000000..4c0edb8f3ac850c4e6b582d7a193cf6ca1b2a01b GIT binary patch literal 59 zcmew)D#l=wQBqQ1rLUh_k(gVMld6|rl&)WBC&tJmQeY+VL_dm82Gz NC={0zW#*+T0RRNA5WoNc literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx new file mode 100644 index 0000000000000000000000000000000000000000..8e4251486dfcdcc7343027358ae26547c08f355e GIT binary patch literal 16 NcmcC!fBdx&SZ$ literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree 
b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree new file mode 100644 index 0000000000000000000000000000000000000000..c9426b2a5e5c2105de52e6196409e5222396a68b GIT binary patch literal 16 LcmZQzKmm*Z01*HJ literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats new file mode 100644 index 0000000000000000000000000000000000000000..1ee9a05369656705c413eff78239bc5bf4f6d286 GIT binary patch literal 140 gcmZQz00Tw{#lQ-snUMHwP&SZ)DT7H9qDfN^01YPqDgXcg literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 new file mode 100644 index 0000000000000000000000000000000000000000..36efe2295040d70ebc3425fde69b4da7fa0d2642 GIT binary patch literal 4 LcmZQzVPpaT02BZS literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 new file mode 100644 index 0000000000000000000000000000000000000000..9c47accf380470c3e2d170febfbe9c5135850c2c GIT binary patch literal 4 LcmZQ%W?%*Y02BZS literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..28fe596360999e93dfc88b2edc2c7c6bd13961c2 GIT binary patch literal 30 WcmZQz00Sln1tggoKuj>7r2zm2`~eF9 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 
b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 new file mode 100644 index 000000000..fe6a93a9d --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..b5b5e052cb82ef1d19d680d81225082613236ff5 GIT binary patch literal 30 VcmZQz00Sln1tJ?j1enj*000Ku0R{j7 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 new file mode 100644 index 0000000000000000000000000000000000000000..51ab423f013f2a85c59b8a0fa60aface206a921e GIT binary patch literal 4 LcmZQ#WMKdR02crV literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..e80ed9ccb03bbd79472f326ba30480cc96bf9f6c GIT binary patch literal 19 QcmZQz00Tw{1tggp005-`g#Z8m literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 new file mode 100644 index 0000000000000000000000000000000000000000..01cbafe4da82ee422876b6f2ea19426637da1da3 GIT binary patch literal 4 LcmZQ(WM%*W02u%X literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..28fe596360999e93dfc88b2edc2c7c6bd13961c2 GIT binary patch literal 30 WcmZQz00Sln1tggoKuj>7r2zm2`~eF9 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 new file mode 100644 
index 000000000..7926fc053 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx new file mode 100644 index 0000000000000000000000000000000000000000..b5b5e052cb82ef1d19d680d81225082613236ff5 GIT binary patch literal 30 VcmZQz00Sln1tJ?j1enj*000Ku0R{j7 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 new file mode 100644 index 0000000000000000000000000000000000000000..0635f5470e1e88ec8490c8cfc47157468df04c9d GIT binary patch literal 12000 zcmeIuF$#b%5Cp)DXdzgMrDaMV>C;V!e!zMTmgTOvVv;O*r_{Qyd**C8a;CQPRg!z8 zwC)-E)VgQf%KiP&1PBlyK!5-N0t5&UAV7cs0RjXF5FkK+009C72oNAZfB*pk1PBly RK!5-N0t5&UAVA=czynW21{?qY literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx new file mode 100644 index 0000000000000000000000000000000000000000..f8cf365b4f6a453048cc7fb3372d6815fe549f22 GIT binary patch literal 34 XcmZQzU|?imVBiE17{G&pLGJ+o1M&f4 literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree new file mode 100644 index 0000000000000000000000000000000000000000..c9426b2a5e5c2105de52e6196409e5222396a68b GIT binary patch literal 16 LcmZQzKmm*Z01*HJ literal 0 HcmV?d00001 diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java new file mode 100644 index 000000000..75e433e1a --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java @@ -0,0 +1,98 @@ +package 
org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +import org.junit.Ignore; +import org.junit.Test; +import org.mockito.internal.util.collections.Sets; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; + +public class VLogReasonerTridentInput { + + private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); + private static final PositiveLiteral 
queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, + Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), + Expressions.makeUniversalVariable("o")); + + @SuppressWarnings("unchecked") + private static final Set> expectedTernaryQueryResult = Sets.newSet( + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/p"), + Expressions.makeAbstractConstant("http://example.org/c2")), + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/q"), + Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); + + @Ignore + // Test fails, see https://github.com/karmaresearch/vlog/issues/55 + @Test + public void testLoadTernaryFactsFromSingleRdfDataSource() throws IOException { + final DataSource fileDataSource = new TridentDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + "tridentTernaryFacts"); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + + assertEquals(expectedTernaryQueryResult, queryResult); + } + } + + @Ignore + // Test fails, see https://github.com/karmaresearch/vlog/issues/56 + @Test(expected = IOException.class) + public void tridentDbDoesNotExist_fails() throws IOException { + final File nonexistingFile = new File("nonexisting"); + assertFalse(nonexistingFile.exists()); + final DataSource dataSource = new TridentDataSource(nonexistingFile.getName()); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, dataSource)); + + 
try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + } + } + +} From 27d823062724c976f05841e401f0c2e6b8cd9577 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 16:22:49 +0200 Subject: [PATCH 0993/1255] remove unused imports --- .../rulewerk/reasoner/vlog/VLogReasonerTridentInput.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java index 75e433e1a..9cf044481 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java @@ -40,8 +40,6 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; -import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; public class VLogReasonerTridentInput { From a2de5b03180ba27019e59d8a2f6a51f3a037a4e3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 17:25:16 +0200 Subject: [PATCH 0994/1255] removed PromptProvider interface to simplify code --- .../rulewerk/client/shell/CommandReader.java | 9 +++--- .../client/shell/DefaultConfiguration.java | 5 ++-- .../client/shell/InteractiveShell.java | 3 +- .../rulewerk/client/shell/PromptProvider.java | 28 ------------------- 4 files changed, 9 insertions(+), 36 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java diff --git 
a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 18400e686..5871389be 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -33,13 +33,13 @@ public class CommandReader { private final LineReader lineReader; - private final PromptProvider promptProvider; + private final AttributedString prompt; private final Interpreter interpreter; - public CommandReader(final LineReader lineReader, final PromptProvider promptProvider, + public CommandReader(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { this.lineReader = lineReader; - this.promptProvider = promptProvider; + this.prompt = prompt; this.interpreter = interpreter; } @@ -54,8 +54,7 @@ public CommandReader(final LineReader lineReader, final PromptProvider promptPro public Command readCommand() { String readLine; try { - final AttributedString prompt = this.promptProvider.getPrompt(); - readLine = this.lineReader.readLine(prompt.toAnsi(this.lineReader.getTerminal())); + readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // Exit request from user CTRL+C diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index e9cf0438f..0a5a2c688 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -43,10 +43,11 @@ public final class DefaultConfiguration { private DefaultConfiguration() { } - public 
static PromptProvider buildPromptProvider() { - return () -> new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); + public static AttributedString buildPromptProvider() { + return new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); } + public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 0bee3a90c..877cd19ad 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -24,6 +24,7 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -51,7 +52,7 @@ public static void run() throws IOException { final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final PromptProvider promptProvider = DefaultConfiguration.buildPromptProvider(); + final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); shell.run(commandReader); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java deleted file 
mode 100644 index ff5fd6ea4..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/PromptProvider.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.jline.utils.AttributedString; - -public interface PromptProvider { - - AttributedString getPrompt(); -} From b52726e1ad0c784d19e2b7751156bfe216cf8c12 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 17:25:33 +0200 Subject: [PATCH 0995/1255] support loading OWL files --- rulewerk-commands/pom.xml | 10 +++ .../commands/LoadCommandInterpreter.java | 75 ++++++++++++++++++- .../src/test/data/loadtest-fails.owl | 1 + rulewerk-commands/src/test/data/loadtest.owl | 3 + .../commands/LoadCommandInterpreterTest.java | 56 ++++++++++++++ 5 files changed, 142 insertions(+), 3 deletions(-) create mode 100644 rulewerk-commands/src/test/data/loadtest-fails.owl create mode 100644 rulewerk-commands/src/test/data/loadtest.owl diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 9e4eb1374..97509c6c6 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -27,5 +27,15 @@ rulewerk-parser ${project.version} + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + diff --git 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index b8b74b0b8..5d245cc4e 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.commands; +import java.io.File; + /*- * #%L * Rulewerk Core Components @@ -23,17 +25,62 @@ import java.io.FileNotFoundException; import java.io.InputStream; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; +/** + * Interpreter for the load command. 
+ * + * @author Markus Kroetzsch + * + */ public class LoadCommandInterpreter implements CommandInterpreter { + static final String TASK_RLS = "RULES"; + static final String TASK_OWL = "OWL"; + static final String TASK_RDF = "RDF"; + @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - Interpreter.validateArgumentCount(command, 1); - String fileName = Interpreter.extractStringArgument(command, 0, "filename"); + String task; + int pos = 0; + if (command.getArguments().size() > 0 && command.getArguments().get(0).fromTerm().isPresent() + && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + task = Interpreter.extractNameArgument(command, 0, "task"); + Interpreter.validateArgumentCount(command, 2); + pos++; + } else { + task = TASK_RLS; + Interpreter.validateArgumentCount(command, 1); + } + + String fileName = Interpreter.extractStringArgument(command, pos, "filename"); + + int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); + int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); + + if (TASK_RLS.equals(task)) { + loadKb(interpreter, fileName); + } else if (TASK_OWL.equals(task)) { + loadOwl(interpreter, fileName); + } else { + throw new CommandExecutionException("Unknown task " + task + ". 
Should be " + TASK_RLS + " or " + TASK_OWL); + } + interpreter.printNormal( + "Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + " new fact(s) and " + + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s)\n"); + + } + + private void loadKb(Interpreter interpreter, String fileName) throws CommandExecutionException { try { InputStream inputStream = interpreter.getFileInputStream(fileName); RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); @@ -44,9 +91,31 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } + private void loadOwl(Interpreter interpreter, String fileName) throws CommandExecutionException { + final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); + OWLOntology ontology; + try { + ontology = ontologyManager.loadOntologyFromOntologyDocument(new File(fileName)); + } catch (OWLOntologyCreationException e) { + throw new CommandExecutionException("Problem loading OWL ontology: " + e.getMessage(), e); + } + interpreter.printNormal( + "Found OWL ontology with " + ontology.getLogicalAxiomCount() + " logical OWL axioms ...\n"); + + final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); + owlToRulesConverter.addOntology(ontology); + + interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getRules()); + interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); + } + @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " \n" + " file: path to a Rulewerk rls file\n"); + interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // + + " file: path to the file to load\n" // + + " TASK: optional; one of RULES (default) or OWL:\n" // + + " RULES to load a knowledge base in Rulewerk rls format\n" // + + " OWL to load an OWL ontology and convert it to rules\n"); } @Override diff --git 
a/rulewerk-commands/src/test/data/loadtest-fails.owl b/rulewerk-commands/src/test/data/loadtest-fails.owl new file mode 100644 index 000000000..3c8a426bb --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest-fails.owl @@ -0,0 +1 @@ +this is not a valid OWL file diff --git a/rulewerk-commands/src/test/data/loadtest.owl b/rulewerk-commands/src/test/data/loadtest.owl new file mode 100644 index 000000000..3c107a48f --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.owl @@ -0,0 +1,3 @@ +@prefix : . + +:a rdf:type owl:NamedIndividual, :C . diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 5ed5a0f14..92613aaca 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -65,6 +65,53 @@ public void correctUse_succeeds() throws ParsingException, CommandExecutionExcep assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + @Test + public void correctUseWithRulesTask_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("p(a) .".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load RULES 'loadtest.rls' ."); + interpreter.runCommand(command); + + 
assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/C", 1); + Term term = Expressions.makeAbstractConstant("http://example.org/a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest.owl' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseWithOwlTask_malformedOwl_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-fails.owl' ."); + interpreter.runCommand(command); + } + @Test(expected = CommandExecutionException.class) public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); @@ -106,6 +153,15 @@ public void wrongArgumentType_fails() throws ParsingException, CommandExecutionE interpreter.runCommand(command); } + @Test(expected = CommandExecutionException.class) + public void wrongTask_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + 
Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load UNKOWNTASK 'loadtest.rls' ."); + interpreter.runCommand(command); + } + @Test public void help_succeeds() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); From c2dbe2a0ced6947e17fe669f76c8b658abe4489b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 17:33:30 +0200 Subject: [PATCH 0996/1255] use printNormal method for printing parsing error messages --- .../semanticweb/rulewerk/client/shell/CommandReader.java | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java index 5871389be..298127a95 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java @@ -36,8 +36,7 @@ public class CommandReader { private final AttributedString prompt; private final Interpreter interpreter; - public CommandReader(final LineReader lineReader, final AttributedString prompt, - final Interpreter interpreter) { + public CommandReader(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { this.lineReader = lineReader; this.prompt = prompt; this.interpreter = interpreter; @@ -82,9 +81,8 @@ public Command readCommand() { try { return this.interpreter.parseCommand(readLine); } catch (final ParsingException e) { - // FIXME do I need to flush terminal? 
- this.lineReader.getTerminal().writer() - .println("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + return null; } } From 118f50d8dc86e5bbbdb48e0123946b9b384bf260 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 18:11:45 +0200 Subject: [PATCH 0997/1255] better handling of unsupported OWL axioms --- .../commands/LoadCommandInterpreter.java | 15 +++- .../src/test/data/loadtest-unsupported.owl | 5 ++ rulewerk-commands/src/test/data/loadtest.owl | 3 +- .../commands/LoadCommandInterpreterTest.java | 19 +++++ .../rulewerk/owlapi/OwlToRulesConverter.java | 72 ++++++++++++++++++- 5 files changed, 110 insertions(+), 4 deletions(-) create mode 100644 rulewerk-commands/src/test/data/loadtest-unsupported.owl diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 5d245cc4e..3dda339e4 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -76,7 +76,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.printNormal( "Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + " new fact(s) and " - + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s)\n"); + + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s).\n"); } @@ -102,8 +102,19 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.printNormal( "Found OWL ontology with " + ontology.getLogicalAxiomCount() + " logical OWL axioms ...\n"); - final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); + final 
OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(false); owlToRulesConverter.addOntology(ontology); + if (owlToRulesConverter.getUnsupportedAxiomsCount() > 0) { + interpreter.printImportant("Warning: Some OWL axioms could not be converted to rules.\n"); + owlToRulesConverter.getUnsupportedAxiomsSample() + .forEach((owlAxiom) -> interpreter.printNormal(owlAxiom.toString() + "\n")); + if (owlToRulesConverter.getUnsupportedAxiomsSample().size() < owlToRulesConverter + .getUnsupportedAxiomsCount()) { + interpreter.printNormal("...\n"); + } + interpreter.printNormal("Encountered " + owlToRulesConverter.getUnsupportedAxiomsCount() + + " unsupported logical axioms in total.\n"); + } interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getRules()); interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); diff --git a/rulewerk-commands/src/test/data/loadtest-unsupported.owl b/rulewerk-commands/src/test/data/loadtest-unsupported.owl new file mode 100644 index 000000000..405a2c4a5 --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest-unsupported.owl @@ -0,0 +1,5 @@ +@prefix : . +@prefix owl: . + +:a a owl:NamedIndividual, :C . +:p a owl:ObjectProperty, owl:InverseFunctionalProperty . \ No newline at end of file diff --git a/rulewerk-commands/src/test/data/loadtest.owl b/rulewerk-commands/src/test/data/loadtest.owl index 3c107a48f..9b9d9f270 100644 --- a/rulewerk-commands/src/test/data/loadtest.owl +++ b/rulewerk-commands/src/test/data/loadtest.owl @@ -1,3 +1,4 @@ @prefix : . +@prefix owl: . -:a rdf:type owl:NamedIndividual, :C . +:a a owl:NamedIndividual, :C . 
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 92613aaca..b29f99f14 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -101,6 +101,25 @@ public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExe assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + + @Test + public void correctUseWithOwlTask_UnsupportedAxioms_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/C", 1); + Term term = Expressions.makeAbstractConstant("http://example.org/a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-unsupported.owl' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + // OUtput mentions the offending axiom in Functional-Style Syntax: + assertTrue(writer.toString().contains("InverseFunctionalObjectProperty()")); + } @Test(expected = CommandExecutionException.class) public void correctUseWithOwlTask_malformedOwl_fails() diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index 
f13f724fe..ec8c61a55 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -1,5 +1,8 @@ package org.semanticweb.rulewerk.owlapi; +import java.util.ArrayList; +import java.util.List; + /*- * #%L * Rulewerk OWL API Support @@ -22,9 +25,12 @@ import java.util.Set; +import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Rule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for converting OWL ontologies to rules. @@ -34,8 +40,35 @@ */ public class OwlToRulesConverter { + private static Logger LOGGER = LoggerFactory.getLogger(OwlToRulesConverter.class); + final OwlAxiomToRulesConverter owlAxiomToRulesConverter = new OwlAxiomToRulesConverter(); + private final boolean failOnUnsupported; + private int unsupportedAxiomsCount = 0; + private final List unsupportedAxioms = new ArrayList<>(); + + /** + * Constructor. + * + * @param failOnUnsupported whether the converter should fail with an + * {@link OwlFeatureNotSupportedException} when + * encountering axioms that cannot be converted to + * rules or facts. + */ + public OwlToRulesConverter(boolean failOnUnsupported) { + this.failOnUnsupported = failOnUnsupported; + } + + /** + * Constructs an object that fails with a + * {@link OwlFeatureNotSupportedException} when encountering axioms that cannot + * be converted to rules or facts. + */ + public OwlToRulesConverter() { + this(true); + } + /** * Converts the given OWL ontology to rules and facts, and adds the result to * the internal buffer of rules and facts for later retrieval. 
@@ -44,7 +77,22 @@ public class OwlToRulesConverter { */ public void addOntology(final OWLOntology owlOntology) { this.owlAxiomToRulesConverter.startNewBlankNodeContext(); - owlOntology.axioms().forEach(owlAxiom -> owlAxiom.accept(this.owlAxiomToRulesConverter)); + owlOntology.axioms().forEach(owlAxiom -> { + try { + owlAxiom.accept(this.owlAxiomToRulesConverter); + } catch (OwlFeatureNotSupportedException e) { + if (failOnUnsupported) { + LOGGER.error(e.getMessage()); + throw e; + } else { + LOGGER.warn(e.getMessage()); + unsupportedAxiomsCount++; + if (unsupportedAxioms.size() < 10) { + unsupportedAxioms.add(owlAxiom); + } + } + } + }); } /** @@ -69,4 +117,26 @@ public Set getRules() { return this.owlAxiomToRulesConverter.rules; } + /** + * Returns the number of OWL axioms that could not be converted into rules. This + * number is only computed if the object is not configured to fail when + * encountering the first unsupported axiom. + * + * @return total number of unsupported axioms + */ + public int getUnsupportedAxiomsCount() { + return unsupportedAxiomsCount; + } + + /** + * Returns up to 10 unsupported axioms encountered during the conversion. The + * complete number of unsupported axioms can be queried using + * {@link #getUnsupportedAxiomsCount()}. 
+ * + * @return list of first ten unsupported axioms that were encountered + */ + public List getUnsupportedAxiomsSample() { + return unsupportedAxioms; + } + } From b95deec26a1ccc6575705eb689394113f94a6577 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 18:13:56 +0200 Subject: [PATCH 0998/1255] also test handling of missing file --- .../rulewerk/commands/LoadCommandInterpreterTest.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index b29f99f14..52daa0c6a 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -130,6 +130,16 @@ public void correctUseWithOwlTask_malformedOwl_fails() Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-fails.owl' ."); interpreter.runCommand(command); } + + @Test(expected = CommandExecutionException.class) + public void correctUseWithOwlTask_missingFile_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load OWL 'src/test/data/file-does-not-exist.owl' ."); + interpreter.runCommand(command); + } @Test(expected = CommandExecutionException.class) public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { From e3db16cd71a0a8ee871a4214d119830cbca0eafa Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 Aug 2020 18:23:44 +0200 Subject: [PATCH 0999/1255] simplify code remove CommandReader --- .../rulewerk/client/shell/CommandReader.java | 102 ------------------ 
.../client/shell/InteractiveShell.java | 5 +- .../rulewerk/client/shell/Shell.java | 100 ++++++++++++----- 3 files changed, 74 insertions(+), 133 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java deleted file mode 100644 index 298127a95..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/CommandReader.java +++ /dev/null @@ -1,102 +0,0 @@ -package org.semanticweb.rulewerk.client.shell; - -import org.jline.reader.EndOfFileException; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; -import org.jline.utils.AttributedString; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class CommandReader { - - private final LineReader lineReader; - private final AttributedString prompt; - private final Interpreter interpreter; - - public CommandReader(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { - this.lineReader = lineReader; - this.prompt = prompt; - this.interpreter = interpreter; - } - - /** - * Reads a command from the prompt and returns a corresponding {@link Command} - * object. If no command should be executed, null is returned. Some effort is - * made to interpret mistyped commands by adding @ and . before and after the - * input, if forgotten. 
- * - * @return command or null - */ - public Command readCommand() { - String readLine; - try { - readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); - } catch (final UserInterruptException e) { - if (e.getPartialLine().isEmpty()) { - // Exit request from user CTRL+C - return ExitCommandInterpreter.EXIT_COMMAND; - } else { - return null; // used as empty command - } - } catch (final EndOfFileException e) { - // Exit request from user CTRL+D - return ExitCommandInterpreter.EXIT_COMMAND; - - } - - readLine = readLine.trim(); - if ("".equals(readLine)) { - return null; - } - if (readLine.charAt(0) != '@') { - readLine = "@" + readLine; - } - if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + " ."; - } - - try { - return this.interpreter.parseCommand(readLine); - } catch (final ParsingException e) { - this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); - - return null; - } - } - -// /** -// * Sanitize the buffer input given the customizations applied to the JLine -// * parser (e.g. support for line continuations, etc.) 
-// */ -// static List sanitizeInput(List words) { -// words = words.stream().map(s -> s.replaceAll("^\\n+|\\n+$", "")) // CR at beginning/end of line introduced by -// // backslash continuation -// .map(s -> s.replaceAll("\\n+", " ")) // CR in middle of word introduced by return inside a quoted string -// .collect(Collectors.toList()); -// return words; -// } - -} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 877cd19ad..478299580 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -49,13 +49,12 @@ public static void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); final Interpreter interpreter = initializeInterpreter(terminal); - final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); - final CommandReader commandReader = new CommandReader(lineReader, promptProvider, interpreter); - shell.run(commandReader); + final Shell shell = new Shell(lineReader, promptProvider, interpreter); + shell.run(); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 0768042b6..8754598de 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,5 +1,10 @@ package org.semanticweb.rulewerk.client.shell; +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; 
+import org.jline.utils.AttributedString; + /*- * #%L * Rulewerk Client @@ -26,32 +31,37 @@ import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; public class Shell { private final Interpreter interpreter; + private final LineReader lineReader; + private final AttributedString prompt; boolean running; - public Shell(final Interpreter interpreter) { + public Shell(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { + this.lineReader = lineReader; + this.prompt = prompt; this.interpreter = interpreter; - CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); + final CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); } } - public void run(final CommandReader commandReader) { - printWelcome(); + public void run() { + this.printWelcome(); - running = true; - while (running) { + this.running = true; + while (this.running) { final Command command; try { - command = commandReader.readCommand(); + command = this.readCommand(); } catch (final Exception e) { - interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); + this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); e.printStackTrace(); continue; } @@ -60,11 +70,56 @@ public void run(final CommandReader commandReader) { try { this.interpreter.runCommand(command); } catch (final CommandExecutionException e) { - interpreter.printNormal("Error: " + e.getMessage() + "\n"); + this.interpreter.printNormal("Error: " + e.getMessage() + "\n"); } } } - interpreter.printSection("Exiting Rulewerk shell ... 
bye.\n\n"); + this.interpreter.printSection("Exiting Rulewerk shell ... bye.\n\n"); + } + + /** + * Reads a command from the prompt and returns a corresponding {@link Command} + * object. If no command should be executed, null is returned. Some effort is + * made to interpret mistyped commands by adding @ and . before and after the + * input, if forgotten. + * + * @return command or null + */ + public Command readCommand() { + String readLine; + try { + readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); + } catch (final UserInterruptException e) { + if (e.getPartialLine().isEmpty()) { + // Exit request from user CTRL+C + return ExitCommandInterpreter.EXIT_COMMAND; + } else { + return null; // used as empty command + } + } catch (final EndOfFileException e) { + // Exit request from user CTRL+D + return ExitCommandInterpreter.EXIT_COMMAND; + + } + + readLine = readLine.trim(); + if ("".equals(readLine)) { + return null; + } + if (readLine.charAt(0) != '@') { + readLine = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + readLine = readLine + " ."; + } + + try { + return this.interpreter.parseCommand(readLine); + } catch (final ParsingException e) { + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + + return null; + } } public void exitShell() { @@ -72,24 +127,13 @@ public void exitShell() { } private void printWelcome() { - interpreter.printNormal("\n"); - interpreter.printSection("Welcome to the Rulewerk interactive shell.\n"); - interpreter.printNormal("For further information, type "); - interpreter.printCode("@help."); - interpreter.printNormal(" To quit, type "); - interpreter.printCode("@exit.\n"); - interpreter.printNormal("\n"); + this.interpreter.printNormal("\n"); + this.interpreter.printSection("Welcome to the Rulewerk interactive shell.\n"); + this.interpreter.printNormal("For further information, type "); + this.interpreter.printCode("@help."); + 
this.interpreter.printNormal(" To quit, type "); + this.interpreter.printCode("@exit.\n"); + this.interpreter.printNormal("\n"); } -// @Override -// public void handleResult(final Object result) { -// this.terminal.writer().println(result); -// this.terminal.writer().flush(); -// } - -// @Override -// public void handleResult(final AttributedCharSequence result) { -// this.terminal.writer().println(result.toAnsi(this.terminal)); -// this.terminal.writer().flush(); -// } } From 90fbe1ad895eb21bcf0a3566d44a2bbe9d7bc4a4 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 25 Aug 2020 22:59:25 +0200 Subject: [PATCH 1000/1255] handle plain "string" constants these were accidentally converted to abstract constants before; xsd:string seems more appropriate --- .../vlog/VLogFastQueryResultIterator.java | 4 ++-- .../reasoner/vlog/VLogToModelConverter.java | 11 ++++++++++- .../vlog/VLogToModelConverterTest.java | 19 ++++++++----------- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java index 17acf658e..33e88a5d6 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -117,8 +117,8 @@ public void put(long id, Term term) { */ private final VLog vLog; /** - * VLog ids of the previous tuple, with the last id fixed to -1 (since it is never - * useful in caching). + * VLog ids of the previous tuple, with the last id fixed to -1 (since it is + * never useful in caching). 
*/ private long[] prevIds = null; /** diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java index b237dc947..0824c0d73 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -23,7 +23,9 @@ import java.util.ArrayList; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; @@ -117,8 +119,15 @@ static Constant toConstant(String vLogConstantName) { final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); final String string = vLogConstantName.substring(1, startTypeIdx - 1); constant = new LanguageStringConstantImpl(string, languageTag); + } else if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '"' + && vLogConstantName.length() > 1) { + // This is already an unexpceted case. Untyped strings "constant" should not + // occur. But if they do, this is our best guess on how to interpret them. 
+ constant = new DatatypeConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1), + PrefixDeclarationRegistry.XSD_STRING); } else { - constant = new AbstractConstantImpl(vLogConstantName); + throw new RulewerkRuntimeException("VLog returned a constant name '" + vLogConstantName + + "' that Rulewerk cannot make sense of."); } } } else { diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java index 2e2db1b5c..3f0dd88a5 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java @@ -20,10 +20,8 @@ * #L% */ import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; @@ -34,7 +32,8 @@ public class VLogToModelConverterTest { @Test public void testAbstractConstantConversion() { - final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "c"); final Term rulewerkTerm = new AbstractConstantImpl("c"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(rulewerkTerm, convertedTerm); @@ -69,7 +68,8 @@ public void testLanguageStringConversion() { @Test public void testNamedNullConversion() { - final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); + final 
karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "_123"); final Term rulewerkTerm = new NamedNullImpl("_123"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(rulewerkTerm, convertedTerm); @@ -77,18 +77,15 @@ public void testNamedNullConversion() { @Test(expected = IllegalArgumentException.class) public void testVariableConversion() { - final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "X"); VLogToModelConverter.toTerm(vLogTerm); } - @Test + @Test(expected = RuntimeException.class) public void testAbstractConstantContainingQuoteExpression() { final String constName = "\""; - final Term convertedTerm = VLogToModelConverter - .toTerm(new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, constName)); - assertTrue(convertedTerm.isConstant()); - assertTrue(convertedTerm instanceof AbstractConstant); - assertEquals(constName, convertedTerm.getName()); + VLogToModelConverter.toTerm(new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, constName)); } } From 0e49429ab6cf71936f8763a223b10ab7946a459b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 09:59:55 +0200 Subject: [PATCH 1001/1255] support loading RDF files --- .../commands/LoadCommandInterpreter.java | 71 ++++++++++- rulewerk-commands/src/test/data/loadtest.nt | 1 + rulewerk-commands/src/test/data/loadtest.rdf | 9 ++ rulewerk-commands/src/test/data/loadtest.ttl | 3 + .../commands/LoadCommandInterpreterTest.java | 112 +++++++++++++++--- rulewerk-rdf/pom.xml | 1 - 6 files changed, 180 insertions(+), 17 deletions(-) create mode 100644 rulewerk-commands/src/test/data/loadtest.nt create mode 100644 rulewerk-commands/src/test/data/loadtest.rdf create mode 100644 rulewerk-commands/src/test/data/loadtest.ttl 
diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 3dda339e4..56ed2d104 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -23,17 +23,33 @@ */ import java.io.FileNotFoundException; +import java.io.IOException; import java.io.InputStream; - +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.openrdf.model.Model; +import org.openrdf.model.Namespace; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** * Interpreter for the load command. 
@@ -70,6 +86,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio loadKb(interpreter, fileName); } else if (TASK_OWL.equals(task)) { loadOwl(interpreter, fileName); + } else if (TASK_RDF.equals(task)) { + loadRdf(interpreter, fileName); } else { throw new CommandExecutionException("Unknown task " + task + ". Should be " + TASK_RLS + " or " + TASK_OWL); } @@ -87,7 +105,7 @@ private void loadKb(Interpreter interpreter, String fileName) throws CommandExec } catch (FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (ParsingException e) { - throw new CommandExecutionException("Error parsing file: " + e.getMessage(), e); + throw new CommandExecutionException("Failed to parse Rulewerk file: " + e.getMessage(), e); } } @@ -120,6 +138,55 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); } + private void loadRdf(Interpreter interpreter, String fileName) throws CommandExecutionException { + try { + String baseIri = new File(fileName).toURI().toString(); + + Iterator formatsToTry = Arrays.asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML) + .iterator(); + Model model = null; + List parseErrors = new ArrayList<>(); + while (model == null && formatsToTry.hasNext()) { + RDFFormat rdfFormat = formatsToTry.next(); + try { + InputStream inputStream = interpreter.getFileInputStream(fileName); + model = parseRdfFromStream(inputStream, rdfFormat, baseIri); + interpreter.printNormal("Found RDF document in format " + rdfFormat.getName() + " ...\n"); + } catch (RDFParseException | RDFHandlerException e) { + parseErrors.add("Failed to parse as " + rdfFormat.getName() + ": " + e.getMessage()); + } + } + if (model == null) { + String message = "Failed to parse RDF input:"; + for (String error : parseErrors) { + message += "\n " + error; + } + throw new CommandExecutionException(message); + } + + 
interpreter.getKnowledgeBase().addStatements(RdfModelConverter.rdfModelToFacts(model)); + for (Namespace namespace : model.getNamespaces()) { + try { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry() + .setPrefixIri(namespace.getPrefix() + ":", namespace.getName()); + } catch (PrefixDeclarationException e) { + // ignore this prefix + } + } + } catch (IOException e) { + throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); + } + } + + private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, String baseIri) + throws RDFParseException, RDFHandlerException, IOException { + final Model model = new LinkedHashModel(); + final RDFParser rdfParser = Rio.createParser(rdfFormat); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseIri); + return model; + } + @Override public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // diff --git a/rulewerk-commands/src/test/data/loadtest.nt b/rulewerk-commands/src/test/data/loadtest.nt new file mode 100644 index 000000000..89536774b --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.nt @@ -0,0 +1 @@ + . diff --git a/rulewerk-commands/src/test/data/loadtest.rdf b/rulewerk-commands/src/test/data/loadtest.rdf new file mode 100644 index 000000000..affae3f94 --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.rdf @@ -0,0 +1,9 @@ + + + + + + + + diff --git a/rulewerk-commands/src/test/data/loadtest.ttl b/rulewerk-commands/src/test/data/loadtest.ttl new file mode 100644 index 000000000..3fbe612de --- /dev/null +++ b/rulewerk-commands/src/test/data/loadtest.ttl @@ -0,0 +1,3 @@ +@prefix : . + +:a :b :c . 
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 52daa0c6a..ded18aa69 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -32,6 +32,7 @@ import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Predicate; @@ -85,6 +86,29 @@ public void correctUseWithRulesTask_succeeds() throws ParsingException, CommandE assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + @Test(expected = CommandExecutionException.class) + public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("not parsable".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseFileNotFoundError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + 
Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + } + @Test public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); @@ -101,9 +125,10 @@ public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExe assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } - + @Test - public void correctUseWithOwlTask_UnsupportedAxioms_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseWithOwlTask_UnsupportedAxioms_succeeds() + throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); @@ -130,7 +155,7 @@ public void correctUseWithOwlTask_malformedOwl_fails() Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-fails.owl' ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void correctUseWithOwlTask_missingFile_fails() throws ParsingException, CommandExecutionException, IOException { @@ -141,26 +166,85 @@ public void correctUseWithOwlTask_missingFile_fails() interpreter.runCommand(command); } + @Test + public void correctUseWithRdfTask_Nt_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term 
termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_Turtle_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.ttl' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri(":")); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_RdfXml_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + 
Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.rdf' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri("eg:")); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + @Test(expected = CommandExecutionException.class) - public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException { + public void correctUseWithRdfTask_malformedRdf_fails() + throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); - InputStream inputStream = new ByteArrayInputStream("not parsable".getBytes(StandardCharsets.UTF_8)); - Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); - Interpreter interpreter = Mockito.spy(origInterpreter); - Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest-fails.owl' ."); interpreter.runCommand(command); } @Test(expected = CommandExecutionException.class) - public void correctUseFileNotFoundError_fails() throws ParsingException, CommandExecutionException, IOException { + public void correctUseWithRdfTask_missingFile_fails() + throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); - Interpreter origInterpreter = 
InterpreterTest.getMockInterpreter(writer); - Interpreter interpreter = Mockito.spy(origInterpreter); - Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + Command command = interpreter.parseCommand("@load RDF 'src/test/data/file-does-not-exist.rdf' ."); interpreter.runCommand(command); } diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index 16e796b43..fe97c337b 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -46,7 +46,6 @@ org.openrdf.sesame sesame-rio-turtle ${openrdf.sesame.version} - test From 391e4cf22f25af9789fe6d9f98e1712dbcb0d1bb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 10:15:18 +0200 Subject: [PATCH 1002/1255] updated help --- .../rulewerk/commands/LoadCommandInterpreter.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 56ed2d104..3fc22b564 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -89,7 +89,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } else if (TASK_RDF.equals(task)) { loadRdf(interpreter, fileName); } else { - throw new CommandExecutionException("Unknown task " + task + ". Should be " + TASK_RLS + " or " + TASK_OWL); + throw new CommandExecutionException( + "Unknown task " + task + ". 
Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); } interpreter.printNormal( @@ -191,14 +192,15 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // + " file: path to the file to load\n" // - + " TASK: optional; one of RULES (default) or OWL:\n" // + + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // - + " OWL to load an OWL ontology and convert it to rules\n"); + + " OWL to load an OWL ontology and convert it to facts and rules\n" // + + " RDF to load an RDF document and convert it to facts for predicate TRIPLE[3]\n"); } @Override public String getSynopsis() { - return "load a knowledge base from file (in Rulewerk rls format)"; + return "load a knowledge base from file (in Rulewerk format, OWL, or RDF)"; } } From 33e39b8cf6f851f40deb8911a9fd2007e540b1a6 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 10:26:38 +0200 Subject: [PATCH 1003/1255] add missing \n after error --- .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 8754598de..a7b777f9c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -116,8 +116,7 @@ public Command readCommand() { try { return this.interpreter.parseCommand(readLine); } catch (final ParsingException e) { - this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); - + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage() + "\n"); 
return null; } } From 0d120f15139d04381c150584043d668174b47e60 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 10:37:53 +0200 Subject: [PATCH 1004/1255] support @retract predicate[arity]. --- .../commands/AddSourceCommandInterpreter.java | 4 +- .../commands/RetractCommandInterpreter.java | 15 ++++++-- .../RetractCommandInterpreterTest.java | 38 ++++++++++++++++++- 3 files changed, 50 insertions(+), 7 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index 8f8a2e4b7..bf7e2aad8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -75,8 +75,8 @@ static Predicate extractPredicate(String predicateDeclaration) throws CommandExe arity = Integer.parseInt(arityString); } catch (IndexOutOfBoundsException | NumberFormatException e) { throw new CommandExecutionException( - "Predicate declaration must have the format \"predicateName[number]\" but was " - + predicateDeclaration); + "Predicate declaration must have the format \"predicateName[number]\" but was \"" + + predicateDeclaration + "\"."); } return Expressions.makePredicate(predicateName, arity); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 9acdbb4a3..2e74580c8 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -24,6 +24,7 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; 
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class RetractCommandInterpreter implements CommandInterpreter { @@ -44,9 +45,14 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio factCount += interpreter.getKnowledgeBase().removeStatement(fact); } else if (argument.fromRule().isPresent()) { ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); - } else { - throw new CommandExecutionException( - "Only facts and rules can be retracted. Encountered " + argument.toString()); + } else { // implies argument.fromTerm().isPresent() + String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + if (predicate.equals(fact.getPredicate())) { + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } + } } } @@ -56,7 +62,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio @Override public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" - + " fact or rule: statement(s) to be removed from the knowledge base\n" + + " fact or rule: statement(s) to be removed from the knowledge base, or a predicate declaration\n" + + " of the form name[arity] to remove all facts for that predicate.\n" + "Reasoning needs to be invoked after finishing the removal of statements.\n"); } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java index 3381ac9ec..f9a8189cc 100644 --- 
a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java @@ -72,9 +72,45 @@ public void correctUse_succeeds() throws ParsingException, CommandExecutionExcep assertTrue(rules.isEmpty()); assertTrue(dataSourceDeclarations.isEmpty()); } + + @Test + public void correctUse_retractPredicate_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term b = Expressions.makeAbstractConstant("b"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Fact pa = Expressions.makeFact(p, a); + Fact pb = Expressions.makeFact(p, b); + Fact qa = Expressions.makeFact(q, a); + + interpreter.getKnowledgeBase().addStatement(pa); + interpreter.getKnowledgeBase().addStatement(pb); + interpreter.getKnowledgeBase().addStatement(qa); + + Command command = interpreter.parseCommand("@retract p[1] ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals(Arrays.asList(qa), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } @Test(expected = CommandExecutionException.class) - public void wrongArgumentTerm_fails() throws ParsingException, CommandExecutionException { + public void wrongArgumentTermNumber_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract 42 ."); + interpreter.runCommand(command); + } + + 
@Test(expected = CommandExecutionException.class) + public void wrongArgumentTermStringNoPredicate_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); From 5fd4a456e681cd45c7eb56ce23cff97ba931de25 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 26 Aug 2020 12:39:34 +0200 Subject: [PATCH 1005/1255] command parsing error print new line --- .../semanticweb/rulewerk/client/shell/DefaultConfiguration.java | 2 ++ .../main/java/org/semanticweb/rulewerk/client/shell/Shell.java | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 0a5a2c688..c53ad76be 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -57,8 +57,10 @@ public static LineReader buildLineReader(final Terminal terminal, final Interpre ; final LineReader lineReader = lineReaderBuilder.build(); + lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than // inserting a tab + lineReader.setOpt(LineReader.Option.AUTO_FRESH_LINE); return lineReader; } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 8754598de..889db2e81 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -116,7 +116,7 @@ public Command readCommand() { try { return this.interpreter.parseCommand(readLine); } catch (final ParsingException e) { - 
this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage()); + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage() + "\n"); return null; } From ce8331263f5f6b6887c0224df9ce1771ceb83fef Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:30:51 +0200 Subject: [PATCH 1006/1255] expanded Skolemization features --- .../implementation/Skolemization.java | 103 ++++++++++++++++-- .../implementation/SkolemizationTest.java | 54 ++++++--- .../owlapi/OwlToRulesConversionHelper.java | 2 +- .../parser/javacc/JavaCCParserBase.java | 2 +- .../reasoner/vlog/TermToVLogConverter.java | 2 +- 5 files changed, 133 insertions(+), 30 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index b0bc00877..35859a969 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -23,36 +23,117 @@ import java.io.ByteArrayOutputStream; import java.util.UUID; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** - * A class that implements skolemization of named null names. The same name - * should be skolemized to the same {@link NamedNull} when skolemized using the - * same instance, but to two different instances of {@link NamedNull} when - * skolemized using different instances of {@link Skolemization}. + * A class that implements skolemization and collision-free renaming of named + * nulls. 
The same name will always be renamed in the same way when using the + * same instance of {@link Skolemization}, but it is extremely unlikely that + * different names or different instances will ever produce the same name. + * + * This can be used to rename apart named nulls from different input sources to + * avoid clashes. There is also code for creating skolem constants with + * appropriate absolute IRIs. * * @author Maximilian Marx */ public class Skolemization { + + /** + * IRI prefix used for IRIs skolem constants in Rulewerk. + */ + public final static String SKOLEM_IRI_PREFIX = "https://rulewerk.semantic-web.org/.well-known/genid/"; + /** + * Prefix used to ensure that UUID-based local names do not start with a number. + */ + private final static String SKOLEM_UUID_START = "ID"; + /** * The namespace to use for skolemizing named null names. */ private final byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); /** - * Skolemize a named null name. The same {@code name} will map to a - * {@link RenamedNamedNull} instance with the same name when called on the same - * instance. + * Creates a named null with a renamed name that is determined by the given + * original name. The result is a {@link RenamedNamedNull} to allow other code + * to recognise that no further renaming is necessary. * - * @return a {@link RenamedNamedNull} instance with a new name that is specific - * to this instance and {@code name}. + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return a {@link RenamedNamedNull} with a new name that is specific to this + * instance and {@code name}. + */ + public RenamedNamedNull getRenamedNamedNull(String name) { + return new RenamedNamedNull(getFreshName(name)); + } + + /** + * Creates a skolem constant that is determined by the given original name. 
+ * + * @param name the name of the {@link NamedNull} to skolemize (or any other + * string for which to create a unique renaming) + * @return a {@link AbstractConstant} with an IRI that is specific to this + * instance and {@code name}. + */ + public AbstractConstant getSkolemConstant(String name) { + return new AbstractConstantImpl(getSkolemConstantName(name)); + } + + /** + * Creates a skolem constant that is determined by the given {@link NamedNull}. + * The method ensures that a new unique name is generated unless the given + * object is already a {@link RenamedNamedNull}. + * + * @param namedNull the {@link NamedNull} to skolemize + * @return a {@link AbstractConstant} with an IRI that is specific to this + * instance and {@code namedNull}. + */ + public AbstractConstant getSkolemConstant(NamedNull namedNull) { + if (namedNull instanceof RenamedNamedNull) { + return new AbstractConstantImpl(getSkolemConstantNameFromUniqueName(namedNull.getName())); + } else { + return new AbstractConstantImpl(getSkolemConstantName(namedNull.getName())); + } + } + + /** + * Returns the name (IRI string) of a skolem constant for skolemising a named + * null of the given name. + * + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return string that is an IRI for a skolem constant + */ + public String getSkolemConstantName(String name) { + return getSkolemConstantNameFromUniqueName(getFreshName(name).toString()); + } + + /** + * Returns a full skolem constant IRI string from its local id part. + * + * @param name local id of skolem constant + * @return IRI string + */ + private String getSkolemConstantNameFromUniqueName(String name) { + return SKOLEM_IRI_PREFIX + SKOLEM_UUID_START + name; + } + + /** + * Creates a fresh UUID based on the given string. The UUID is determined by the + * string and the instance of {@link Skolemization}. 
Other strings or instances + * are extremely unlikely to produce the same string. + * + * @param name the string to be renamed + * @return a UUID for the new name */ - public RenamedNamedNull skolemizeNamedNull(String name) { + public UUID getFreshName(String name) { byte[] nameBytes = name.getBytes(); ByteArrayOutputStream stream = new ByteArrayOutputStream(); stream.write(namedNullNamespace, 0, namedNullNamespace.length); stream.write(nameBytes, 0, nameBytes.length); - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + return UUID.nameUUIDFromBytes(stream.toByteArray()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index 51dc0fe67..a382aa220 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -22,11 +22,11 @@ import static org.junit.Assert.*; -import java.io.IOException; - import org.junit.Before; import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; public class SkolemizationTest { private Skolemization skolemization; @@ -39,38 +39,60 @@ public void init() { } @Test - public void skolemizeNamedNull_sameName_mapsToSameNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); - NamedNull null2 = skolemization.skolemizeNamedNull(name1); + public void skolemizeNamedNull_sameName_mapsToSameNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + NamedNull null2 = skolemization.getRenamedNamedNull(name1); assertEquals(null1.getName(), null2.getName()); } @Test - 
public void skolemizeNamedNull_differentName_mapsToDifferentNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); - NamedNull null2 = skolemization.skolemizeNamedNull(name2); + public void skolemizeNamedNull_differentName_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + NamedNull null2 = skolemization.getRenamedNamedNull(name2); assertNotEquals(null1.getName(), null2.getName()); } @Test - public void skolemizeNamedNull_differentInstances_mapsToDifferentNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); + public void skolemizeNamedNull_differentInstances_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); Skolemization other = new Skolemization(); - NamedNull null2 = other.skolemizeNamedNull(name1); + NamedNull null2 = other.getRenamedNamedNull(name1); assertNotEquals(null1.getName(), null2.getName()); } @Test - public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentNamedNull() throws IOException { - NamedNull null1 = skolemization.skolemizeNamedNull(name1); + public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); Skolemization other = new Skolemization(); - NamedNull null2 = other.skolemizeNamedNull(name2); + NamedNull null2 = other.getRenamedNamedNull(name2); assertNotEquals(null1.getName(), null2.getName()); - assertEquals(null1.getName(), skolemization.skolemizeNamedNull(name1).getName()); - assertEquals(null2.getName(), other.skolemizeNamedNull(name2).getName()); + assertEquals(null1.getName(), skolemization.getRenamedNamedNull(name1).getName()); + assertEquals(null2.getName(), other.getRenamedNamedNull(name2).getName()); + } + + @Test + public void skolemConstant_succeeds() { + AbstractConstant skolem = skolemization.getSkolemConstant(name1); + 
assertTrue(skolem.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX)); + } + + @Test + public void skolemConstantFromNamedNull_succeeds() { + NamedNull null1 = new NamedNullImpl(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + assertEquals(skolem2, skolem1); + } + + @Test + public void skolemConstantFromRenamedNamedNull_succeeds() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + assertEquals(skolem2, skolem1); } } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index f5c737dc2..7028342cf 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -64,7 +64,7 @@ public static Term getIndividualTerm(final OWLIndividual owlIndividual, Skolemiz if (owlIndividual instanceof OWLNamedIndividual) { return new AbstractConstantImpl(((OWLNamedIndividual) owlIndividual).getIRI().toString()); } else if (owlIndividual instanceof OWLAnonymousIndividual) { - return skolemization.skolemizeNamedNull(((OWLAnonymousIndividual) owlIndividual).getID().toString()); + return skolemization.getRenamedNamedNull(((OWLAnonymousIndividual) owlIndividual).getID().toString()); } else { throw new OwlFeatureNotSupportedException( "Could not convert OWL individual '" + owlIndividual.toString() + "' to a term."); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 9ade274bf..f367bb382 100644 --- 
a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -162,7 +162,7 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } NamedNull createNamedNull(String lexicalForm) throws ParseException { - return this.skolemization.skolemizeNamedNull(lexicalForm); + return this.skolemization.getRenamedNamedNull(lexicalForm); } void addStatement(Statement statement) { diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 13a078076..84c224460 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -97,7 +97,7 @@ public static String getVLogNameForNamedNull(final NamedNull namedNull) { if (namedNull instanceof RenamedNamedNull) { return namedNull.getName(); } else { - return skolemization.skolemizeNamedNull(namedNull.getName()).getName(); + return skolemization.getRenamedNamedNull(namedNull.getName()).getName(); } } From 8b35c30c64f4592edc441657043d96e038072702 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:37:26 +0200 Subject: [PATCH 1007/1255] +method to get skolem name from named null --- .../implementation/Skolemization.java | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 35859a969..f8fae3c0f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -92,11 +92,8 @@ public AbstractConstant getSkolemConstant(String name) { * instance and {@code namedNull}. */ public AbstractConstant getSkolemConstant(NamedNull namedNull) { - if (namedNull instanceof RenamedNamedNull) { - return new AbstractConstantImpl(getSkolemConstantNameFromUniqueName(namedNull.getName())); - } else { - return new AbstractConstantImpl(getSkolemConstantName(namedNull.getName())); - } + return new AbstractConstantImpl(getSkolemConstantName(namedNull)); + } /** @@ -111,6 +108,23 @@ public String getSkolemConstantName(String name) { return getSkolemConstantNameFromUniqueName(getFreshName(name).toString()); } + /** + * Returns the name (IRI string) of a skolem constant for skolemising the given + * named {@link NamedNull}. The method ensures that a new unique name is + * generated unless the given object is already a {@link RenamedNamedNull}. + * + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return string that is an IRI for a skolem constant + */ + public String getSkolemConstantName(NamedNull namedNull) { + if (namedNull instanceof RenamedNamedNull) { + return getSkolemConstantNameFromUniqueName(namedNull.getName()); + } else { + return getSkolemConstantName(namedNull.getName()); + } + } + /** * Returns a full skolem constant IRI string from its local id part. 
* From 0e513c57398a8b9de146dbee02b4fc5a2f8d12e7 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:52:13 +0200 Subject: [PATCH 1008/1255] changed prefix for local skolem names --- .../rulewerk/core/reasoner/implementation/Skolemization.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index f8fae3c0f..e51a6b5d7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -49,7 +49,7 @@ public class Skolemization { /** * Prefix used to ensure that UUID-based local names do not start with a number. */ - private final static String SKOLEM_UUID_START = "ID"; + private final static String SKOLEM_UUID_START = "B-"; /** * The namespace to use for skolemizing named null names. 
From 2529528f730d81b3787db28c43192261a6c737d5 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 13:53:05 +0200 Subject: [PATCH 1009/1255] skolemize properly --- .../rulewerk/reasoner/vlog/TermToVLogConverter.java | 7 +------ .../reasoner/vlog/ModelToVLogConverterTest.java | 12 +++++++++--- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 84c224460..3e294f848 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -30,7 +30,6 @@ import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; -import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; /** @@ -94,11 +93,7 @@ public static String getVLogNameForConstant(final Constant constant) { * @return VLog constant string */ public static String getVLogNameForNamedNull(final NamedNull namedNull) { - if (namedNull instanceof RenamedNamedNull) { - return namedNull.getName(); - } else { - return skolemization.getRenamedNamedNull(namedNull.getName()).getName(); - } + return skolemization.getSkolemConstantName(namedNull); } /** diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java index 653cb5401..70c188ffc 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java +++ 
b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java @@ -47,6 +47,7 @@ import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; public class ModelToVLogConverterTest { @@ -125,22 +126,26 @@ public void testToVLogTermBlank() { @Test public void testToVLogTermBlankSkolemization() { + final Skolemization skolemization = new Skolemization(); final NamedNull blank = new NamedNullImpl("blank"); final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); assertNotEquals("blank", vLogSkolemConstant); - assertEquals(36, vLogSkolemConstant.length()); // length of a UUID + // generated ids differ by Skolemization instance, but should have the same + // length: + assertEquals(skolemization.getSkolemConstantName(blank).length(), vLogSkolemConstant.length()); } @Test public void testToVLogTermBlankRenamedSkolemization() { + final Skolemization skolemization = new Skolemization(); final UUID uuid = UUID.randomUUID(); final NamedNull blank = new RenamedNamedNull(uuid); final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); - assertEquals(uuid.toString(), vLogSkolemConstant); + assertEquals(skolemization.getSkolemConstantName(blank), vLogSkolemConstant); } @Test @@ -192,11 +197,12 @@ public void testToVLogFactTuples() { @Test public void testToVLogFactTupleNulls() { + final Skolemization skolemization = new Skolemization(); final UUID uuid = UUID.randomUUID(); final NamedNull n = new RenamedNamedNull(uuid); final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(n)); - final String[] expectedTuple = { uuid.toString() }; + final String[] expectedTuple = { skolemization.getSkolemConstantName(n) }; final String[] actualTuple = 
ModelToVLogConverter.toVLogFactTuple(atom1); From fb83ad6ef382192428dde0d56d05e9aa8e5f12dd Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 14:47:36 +0200 Subject: [PATCH 1010/1255] make RDF converter non-static --- .../commands/LoadCommandInterpreter.java | 3 +- .../examples/rdf/AddDataFromRdfModel.java | 13 ++- .../rulewerk/rdf/RdfModelConverter.java | 39 +++++-- .../rulewerk/rdf/RdfValueToTermConverter.java | 45 ++++++-- .../rdf/RdfValueToTermConverterTest.java | 104 ++++++++++++++++++ .../rdf/TestConvertRdfFileToFacts.java | 16 +-- .../rulewerk/rdf/TestReasonOverRdfFacts.java | 6 +- 7 files changed, 191 insertions(+), 35 deletions(-) create mode 100644 rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 3fc22b564..ec8a2472d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -165,7 +165,8 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe throw new CommandExecutionException(message); } - interpreter.getKnowledgeBase().addStatements(RdfModelConverter.rdfModelToFacts(model)); + RdfModelConverter rdfModelConverter = new RdfModelConverter(true); + interpreter.getKnowledgeBase().addStatements(rdfModelConverter.rdfModelToFacts(model)); for (Namespace namespace : model.getNamespaces()) { try { interpreter.getKnowledgeBase().getPrefixDeclarationRegistry() diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index 79298dd2c..8600edae6 100644 --- 
a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -65,6 +65,7 @@ public static void main(final String[] args) throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { ExamplesUtils.configureLogging(); + RdfModelConverter rdfModelConverter = new RdfModelConverter(); /* * Local file containing metadata of publications from ISWC'16 conference, in @@ -77,10 +78,10 @@ public static void main(final String[] args) RDFFormat.RDFXML); /* - * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each + * having the ternary predicate "TRIPLE". */ - final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); + final Set tripleFactsISWC2016 = rdfModelConverter.rdfModelToFacts(rdfModelISWC2016); System.out.println("Example triple fact from iswc-2016 dataset:"); System.out.println(" - " + tripleFactsISWC2016.iterator().next()); @@ -96,10 +97,10 @@ public static void main(final String[] args) RDFFormat.TURTLE); /* - * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each + * having the ternary predicate "TRIPLE". 
*/ - final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); + final Set tripleFactsISWC2017 = rdfModelConverter.rdfModelToFacts(rdfModelISWC2017); System.out.println("Example triple fact from iswc-2017 dataset:"); System.out.println(" - " + tripleFactsISWC2017.iterator().next()); diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index e5547b8dd..986d044b7 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -76,32 +76,55 @@ public final class RdfModelConverter { */ public static final Predicate RDF_TRIPLE_PREDICATE = Expressions.makePredicate(RDF_TRIPLE_PREDICATE_NAME, 3); - private RdfModelConverter() { + final RdfValueToTermConverter rdfValueToTermConverter; + + /** + * Construct an object that does not skolemize blank nodes. + */ + public RdfModelConverter() { + this(false); + } + + /** + * Constructor. + * + * @param skolemize if true, blank nodes are translated to constants with + * generated IRIs; otherwise they are replanced by named nulls + * with generated ids + */ + public RdfModelConverter(boolean skolemize) { + rdfValueToTermConverter = new RdfValueToTermConverter(skolemize); } /** * Converts each {@code } triple statement of the - * given {@code rdfModel} into a {@link PositiveLiteral} of the form + * given {@code rdfModel} into a {@link Fact} of the form * {@code TRIPLE(subject, predicate, object)}. See * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}, the ternary predicate used * for all literals generated from RDF triples. * * @param rdfModel a {@link Model} of an RDF document, containing triple * statements that will be converter to facts. 
- * @return a set of literals corresponding to the statements of given + * @return a set of facts corresponding to the statements of given * {@code rdfModel}. */ - public static Set rdfModelToFacts(final Model rdfModel) { - return rdfModel.stream().map(RdfModelConverter::rdfStatementToFact).collect(Collectors.toSet()); + public Set rdfModelToFacts(final Model rdfModel) { + return rdfModel.stream().map((statement) -> rdfStatementToFact(statement)).collect(Collectors.toSet()); } - static Fact rdfStatementToFact(final Statement statement) { + /** + * Converts an RDF statement (triple) to a Rulewerk {@link Fact}. + * + * @param statement + * @return + */ + Fact rdfStatementToFact(final Statement statement) { final Resource subject = statement.getSubject(); final URI predicate = statement.getPredicate(); final Value object = statement.getObject(); - return Expressions.makeFact(RDF_TRIPLE_PREDICATE, Arrays.asList(RdfValueToTermConverter.rdfValueToTerm(subject), - RdfValueToTermConverter.rdfValueToTerm(predicate), RdfValueToTermConverter.rdfValueToTerm(object))); + return Expressions.makeFact(RDF_TRIPLE_PREDICATE, Arrays.asList(rdfValueToTermConverter.convertValue(subject), + rdfValueToTermConverter.convertValue(predicate), rdfValueToTermConverter.convertValue(object))); } } diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index 058228665..e12704c3c 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -29,39 +29,62 @@ import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import 
org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +/** + * Helper class to convert RDF ters to Rulewerk {@link Term} objects. + * + * @author Markus Kroetzsch + * + */ final class RdfValueToTermConverter { - private RdfValueToTermConverter() { + final boolean skolemize; + final Skolemization skolemization = new Skolemization(); + + /** + * Constructor. + * + * @param skolemize if true, blank nodes are translated to constants with + * generated IRIs; otherwise they are replanced by named nulls + * with generated ids + */ + public RdfValueToTermConverter(boolean skolemize) { + this.skolemize = skolemize; } - static Term rdfValueToTerm(final Value value) { + public Term convertValue(final Value value) { if (value instanceof BNode) { - return rdfBlankNodeToBlank((BNode) value); + return convertBlankNode((BNode) value); } else if (value instanceof Literal) { - return rdfLiteralToConstant((Literal) value); + return convertLiteral((Literal) value); } else if (value instanceof URI) { - return rdfUriToConstant((URI) value); + return convertUri((URI) value); } else { throw new RulewerkRuntimeException("Unknown value type: " + value.getClass()); } } - static Term rdfBlankNodeToBlank(final BNode bNode) { - // IDs are generated to be unique in every Model. - return new NamedNullImpl(bNode.getID()); + public Term convertBlankNode(final BNode bNode) { + // Note: IDs are generated to be unique in every Model, so our renaming might be + // redundant. But we want a RenamedNamedNull here, and a consistent name format + // is nice too. 
+ if (skolemize) { + return skolemization.getSkolemConstant(bNode.getID()); + } else { + return skolemization.getRenamedNamedNull(bNode.getID()); + } } - static Term rdfUriToConstant(final URI uri) { + public Term convertUri(final URI uri) { final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); return new AbstractConstantImpl(escapedURIString); } - static Term rdfLiteralToConstant(final Literal literal) { + public Term convertLiteral(final Literal literal) { final URI datatype = literal.getDatatype(); if (datatype != null) { return new DatatypeConstantImpl(XMLDatatypeUtil.normalize(literal.getLabel(), datatype), diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java new file mode 100644 index 000000000..2bc4879f7 --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java @@ -0,0 +1,104 @@ +package org.semanticweb.rulewerk.rdf; + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.mockito.Mockito; +import org.openrdf.model.BNode; +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.impl.BNodeImpl; +import org.openrdf.model.impl.LiteralImpl; +import org.openrdf.model.impl.URIImpl; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; + +public class RdfValueToTermConverterTest { + + @Test + public void convertUri_succeeds() { + URI uri = new 
URIImpl("http://example.org"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(uri); + + assertEquals(TermType.ABSTRACT_CONSTANT, term.getType()); + assertEquals("http://example.org", term.getName()); + } + + @Test + public void convertLiteralDatatype_succeeds() { + Literal literal = new LiteralImpl("42", new URIImpl("http://example.org/integer")); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.DATATYPE_CONSTANT, term.getType()); + DatatypeConstant datataypeConstant = (DatatypeConstant) term; + assertEquals("http://example.org/integer", datataypeConstant.getDatatype()); + assertEquals("42", datataypeConstant.getLexicalValue()); + } + + @Test + public void convertLiteralLanguage_succeeds() { + Literal literal = new LiteralImpl("Test", "de"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.LANGSTRING_CONSTANT, term.getType()); + LanguageStringConstant langStringConstant = (LanguageStringConstant) term; + assertEquals("Test", langStringConstant.getString()); + assertEquals("de", langStringConstant.getLanguageTag()); + } + + @Test + public void convertLiteralString_succeeds() { + Literal literal = new LiteralImpl("RDF 1.0 untyped"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.DATATYPE_CONSTANT, term.getType()); + DatatypeConstant datataypeConstant = (DatatypeConstant) term; + assertEquals(PrefixDeclarationRegistry.XSD_STRING, datataypeConstant.getDatatype()); + assertEquals("RDF 1.0 untyped", datataypeConstant.getLexicalValue()); + } + + @Test + public void convertBNodeSkolemize_succeeds() { + BNode bnode = new BNodeImpl("myid"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + 
Term term = converter.convertValue(bnode); + + assertEquals(TermType.ABSTRACT_CONSTANT, term.getType()); + assertTrue(term.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX)); + } + + @Test + public void convertBNode_succeeds() { + BNode bnode = new BNodeImpl("myid"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(false); + Term term = converter.convertValue(bnode); + + assertEquals(TermType.NAMED_NULL, term.getType()); + assertNotEquals("myid", term.getName()); + } + + @Test(expected=RulewerkRuntimeException.class) + public void convertValueUnkownType_fails() { + Value value = Mockito.mock(Value.class); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + converter.convertValue(value); + } + +} diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java index 052ab1f5a..6957d5646 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java @@ -48,6 +48,8 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class TestConvertRdfFileToFacts { + + final RdfModelConverter rdfModelConverter = new RdfModelConverter(); // FIXME: The openrdf parser does neither support '\b' nor '\f' (from ASCII) and // encodes such characters as "\u0008" and "\u000C", respectively (the @@ -127,7 +129,7 @@ public class TestConvertRdfFileToFacts { public void testDataTypesNormalized() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils .parseFile(new File(RdfTestUtils.INPUT_FOLDER + "unnormalizedLiteralValues.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedNormalizedFacts, facts); } @@ -135,7 +137,7 
@@ public void testDataTypesNormalized() throws RDFHandlerException, RDFParseExcept public void testLiteralValuesPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "literalValues.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedLiteralFacts, facts); } @@ -143,7 +145,7 @@ public void testLiteralValuesPreserved() throws RDFHandlerException, RDFParseExc public void testRelativeURIsMadeAbsolute() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "relativeURIs.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedRelativeUriFacts, facts); } @@ -151,7 +153,7 @@ public void testRelativeURIsMadeAbsolute() throws RDFHandlerException, RDFParseE public void testEscapedCharactersExpanded() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "escapedCharacters.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedEscapedCharacterFacts, facts); } @@ -159,7 +161,7 @@ public void testEscapedCharactersExpanded() throws RDFHandlerException, RDFParse public void testLanguageTagsPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "languageTags.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); assertEquals(expectedLanguageTagFacts, facts); } @@ -167,7 +169,7 @@ 
public void testLanguageTagsPreserved() throws RDFHandlerException, RDFParseExce public void testCollectionsPreserved() throws RDFHandlerException, RDFParseException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "collections.ttl"), RDFFormat.TURTLE); - final Set factsFromModel = RdfModelConverter.rdfModelToFacts(model); + final Set factsFromModel = rdfModelConverter.rdfModelToFacts(model); final Term blank1 = RdfTestUtils.getObjectOfFirstMatchedTriple(file2, fileA, factsFromModel); final Term blank2 = RdfTestUtils.getObjectOfFirstMatchedTriple(file3, fileA, factsFromModel); @@ -215,7 +217,7 @@ public void testBlankNodesWithSameLabelAreDifferentInDifferentModels() private Set getBlanksFromTurtleFile(final File file) throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(file, RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); final Set blanks = new HashSet<>(); facts.forEach(fact -> blanks.addAll(fact.getNamedNulls().collect(Collectors.toSet()))); diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index 214d32d6e..ce4d9f3b8 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -47,6 +47,8 @@ import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; public class TestReasonOverRdfFacts { + + final RdfModelConverter rdfModelConverter = new RdfModelConverter(); private final Constant carlBenz = Expressions.makeAbstractConstant("https://example.org/Carl-Benz"); private final Constant invention = Expressions.makeAbstractConstant("https://example.org/invention"); @@ -64,7 +66,7 @@ public class TestReasonOverRdfFacts { 
public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(facts); @@ -83,7 +85,7 @@ public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandl public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandlerException, IOException { final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), RDFFormat.TURTLE); - final Set facts = RdfModelConverter.rdfModelToFacts(model); + final Set facts = rdfModelConverter.rdfModelToFacts(model); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatements(facts); From 61e89a6c615952e79008e0be17c93c28f8aa7032 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:18:45 +0200 Subject: [PATCH 1011/1255] more features for RDF converter --- .../commands/LoadCommandInterpreter.java | 12 +-- .../rulewerk/rdf/RdfModelConverter.java | 60 +++++++++++++- .../src/test/data/input/test-turtle.ttl | 3 + ...a => IntegrationTestsConvertRdfFiles.java} | 2 +- ...grationTestsReasonOverRdfFactsinVLog.java} | 2 +- .../rulewerk/rdf/RdfModelConverterTest.java | 83 +++++++++++++++++++ 6 files changed, 145 insertions(+), 17 deletions(-) create mode 100644 rulewerk-rdf/src/test/data/input/test-turtle.ttl rename rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/{TestConvertRdfFileToFacts.java => IntegrationTestsConvertRdfFiles.java} (99%) rename rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/{TestReasonOverRdfFacts.java => IntegrationTestsReasonOverRdfFactsinVLog.java} (98%) create mode 100644 rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java diff --git 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index ec8a2472d..835a506b9 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -31,7 +31,6 @@ import java.util.List; import org.openrdf.model.Model; -import org.openrdf.model.Namespace; import org.openrdf.model.impl.LinkedHashModel; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; @@ -43,7 +42,6 @@ import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; @@ -166,15 +164,7 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe } RdfModelConverter rdfModelConverter = new RdfModelConverter(true); - interpreter.getKnowledgeBase().addStatements(rdfModelConverter.rdfModelToFacts(model)); - for (Namespace namespace : model.getNamespaces()) { - try { - interpreter.getKnowledgeBase().getPrefixDeclarationRegistry() - .setPrefixIri(namespace.getPrefix() + ":", namespace.getName()); - } catch (PrefixDeclarationException e) { - // ignore this prefix - } - } + rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); } catch (IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); } diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index 
986d044b7..fb14af98b 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -27,17 +27,22 @@ import org.openrdf.model.BNode; import org.openrdf.model.Literal; import org.openrdf.model.Model; +import org.openrdf.model.Namespace; import org.openrdf.model.Resource; import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for converting RDF {@link Model}s to {@link PositiveLiteral} sets. @@ -59,10 +64,13 @@ * * * @author Irina Dragoste + * @author Markus Kroetzsch * */ public final class RdfModelConverter { + private static Logger LOGGER = LoggerFactory.getLogger(RdfModelConverter.class); + /** * The name of the ternary predicate of literals generated from RDF triples: * "TRIPLE". @@ -103,13 +111,57 @@ public RdfModelConverter(boolean skolemize) { * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}, the ternary predicate used * for all literals generated from RDF triples. * - * @param rdfModel a {@link Model} of an RDF document, containing triple - * statements that will be converter to facts. + * @param model a {@link Model} of an RDF document, containing triple statements + * that will be converter to facts. * @return a set of facts corresponding to the statements of given * {@code rdfModel}. 
*/ - public Set rdfModelToFacts(final Model rdfModel) { - return rdfModel.stream().map((statement) -> rdfStatementToFact(statement)).collect(Collectors.toSet()); + public Set rdfModelToFacts(final Model model) { + return model.stream().map((statement) -> rdfStatementToFact(statement)).collect(Collectors.toSet()); + } + + /** + * Adds data and prefix declarations from a given RDF {@link Model} to a given + * {@link KnowledgeBase}. + * + * @param knowledgeBase the {@link KnowledgeBase} to add to + * @param model the {@link Model} with the RDF data + */ + public void addAll(KnowledgeBase knowledgeBase, Model model) { + addPrefixes(knowledgeBase, model); + addFacts(knowledgeBase, model); + } + + /** + * Adds the data from a given RDF {@link Model} as {@link Fact}s to the given + * {@link KnowledgeBase}. + * + * @param knowledgeBase the {@link KnowledgeBase} to add {@link Fact}s to + * @param model the {@link Model} with the RDF data + */ + public void addFacts(KnowledgeBase knowledgeBase, Model model) { + model.stream().forEach((statement) -> { + knowledgeBase.addStatement(rdfStatementToFact(statement)); + }); + } + + /** + * Adds the prefixes declared for a given RDF {@link Model} to the given + * {@link KnowledgeBase}. If a prefix cannot be added for some reason, it is + * ignored and a warning is logged. 
+ * + * @param knowledgeBase the {@link KnowledgeBase} to add prefix declarations to + * @param model the {@link Model} with the RDF data + */ + public void addPrefixes(KnowledgeBase knowledgeBase, Model model) { + for (Namespace namespace : model.getNamespaces()) { + try { + knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri(namespace.getPrefix() + ":", + namespace.getName()); + } catch (PrefixDeclarationException e) { + LOGGER.warn("Failed to set prefix \"" + namespace.getPrefix() + "\" from RDF model: " + e.getMessage()); + } + } } /** diff --git a/rulewerk-rdf/src/test/data/input/test-turtle.ttl b/rulewerk-rdf/src/test/data/input/test-turtle.ttl new file mode 100644 index 000000000..3fbe612de --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/test-turtle.ttl @@ -0,0 +1,3 @@ +@prefix : . + +:a :b :c . diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java similarity index 99% rename from rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java index 6957d5646..823a1589f 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java @@ -47,7 +47,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; -public class TestConvertRdfFileToFacts { +public class IntegrationTestsConvertRdfFiles { final RdfModelConverter rdfModelConverter = new RdfModelConverter(); diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java similarity index 98% rename 
from rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java index ce4d9f3b8..4143669f4 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java @@ -46,7 +46,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class TestReasonOverRdfFacts { +public class IntegrationTestsReasonOverRdfFactsinVLog { final RdfModelConverter rdfModelConverter = new RdfModelConverter(); diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java new file mode 100644 index 000000000..77b280d26 --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java @@ -0,0 +1,83 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.junit.Test; +import org.openrdf.model.Model; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; + +public class RdfModelConverterTest { + + @Test + public void addToKnowledgeBase_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + rdfModelConverter.addAll(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + assertEquals("http://example.org/", knowledgeBase.getPrefixIri(":")); + } + + @Test + public void getFactSet_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + 
"test-turtle.ttl"), RDFFormat.TURTLE); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + Set expected = new HashSet(); + expected.add(fact); + + Set facts = rdfModelConverter.rdfModelToFacts(model); + + assertEquals(expected, facts); + } + +} From 6e702a9133a3a8bc9f02e9080d7ef404c3a0d2a3 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:23:21 +0200 Subject: [PATCH 1012/1255] mention RDF improvements --- RELEASE-NOTES.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index ec4e55973..d7fbee0a6 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -9,15 +9,18 @@ New features: * Significant speedup in iterating over query results * Support for using data from a Trident database, the recommended data source for large RDF graphs in VLog +* More features to control how Rulewerk imports RDF data using rulewerk-rdf module * New class `LiteralQueryResultPrinter` for pretty-printing query results Other improvements: * Improved serialization of knowledge bases (using namespaces) * Simple (non-IRI, namespace-less) predicate names can now include - and _ +* Nulls in input data (aka "blank nodes") are now properly skolemized for VLog * InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where two or more edges are the same. Breaking changes: +* The `RdfModelConverter` class from the rdf package is no longer static (and has more options) * The `Serializer` class in the core package has been replaced by a new implementation with a completely different interface. * The methods `getSerialization` that were present in most syntax objects have been removed. 
Use `toString()` instead for simple serializations, or invoke a custom Serializer. From be6b025dba338f2706ba296bacf394be7bb50970 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:30:47 +0200 Subject: [PATCH 1013/1255] support setting custom triple predicate name --- .../commands/LoadCommandInterpreter.java | 3 +- .../rulewerk/rdf/RdfModelConverter.java | 33 +++++++++---------- .../rulewerk/rdf/RdfModelConverterTest.java | 18 ++++++++++ .../rdf/RdfValueToTermConverterTest.java | 20 +++++++++++ 4 files changed, 56 insertions(+), 18 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 835a506b9..099e29fe4 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -163,7 +163,8 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe throw new CommandExecutionException(message); } - RdfModelConverter rdfModelConverter = new RdfModelConverter(true); + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, + RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME); rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); } catch (IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index fb14af98b..fe61636a3 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -72,36 +72,35 @@ public final class RdfModelConverter { private static Logger LOGGER = 
LoggerFactory.getLogger(RdfModelConverter.class); /** - * The name of the ternary predicate of literals generated from RDF triples: - * "TRIPLE". + * The name of the ternary predicate of literals generated from RDF triples by + * default. */ public static final String RDF_TRIPLE_PREDICATE_NAME = "TRIPLE"; - /** - * The ternary predicate of literals generated from RDF triples. It has - * {@code name}({@link Predicate#getName()}) "TRIPLE" and - * {@code arity}({@link Predicate#getArity()}) 3. - */ - public static final Predicate RDF_TRIPLE_PREDICATE = Expressions.makePredicate(RDF_TRIPLE_PREDICATE_NAME, 3); - final RdfValueToTermConverter rdfValueToTermConverter; + final Predicate triplePredicate; /** - * Construct an object that does not skolemize blank nodes. + * Construct an object that does not skolemize blank nodes and that uses a + * ternary predicate named {@link RdfModelConverter#RDF_TRIPLE_PREDICATE_NAME} + * for storing triples. */ public RdfModelConverter() { - this(false); + this(false, RDF_TRIPLE_PREDICATE_NAME); } /** * Constructor. 
* - * @param skolemize if true, blank nodes are translated to constants with - * generated IRIs; otherwise they are replanced by named nulls - * with generated ids + * @param skolemize if true, blank nodes are translated to constants + * with generated IRIs; otherwise they are replanced + * by named nulls with generated ids + * @param triplePredicateName name of the ternary predicate that should be used + * to store RDF triples */ - public RdfModelConverter(boolean skolemize) { - rdfValueToTermConverter = new RdfValueToTermConverter(skolemize); + public RdfModelConverter(boolean skolemize, String triplePredicateName) { + this.rdfValueToTermConverter = new RdfValueToTermConverter(skolemize); + this.triplePredicate = Expressions.makePredicate(triplePredicateName, 3); } /** @@ -175,7 +174,7 @@ Fact rdfStatementToFact(final Statement statement) { final URI predicate = statement.getPredicate(); final Value object = statement.getObject(); - return Expressions.makeFact(RDF_TRIPLE_PREDICATE, Arrays.asList(rdfValueToTermConverter.convertValue(subject), + return Expressions.makeFact(triplePredicate, Arrays.asList(rdfValueToTermConverter.convertValue(subject), rdfValueToTermConverter.convertValue(predicate), rdfValueToTermConverter.convertValue(object))); } diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java index 77b280d26..6135416f4 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java @@ -80,4 +80,22 @@ public void getFactSet_succeeds() assertEquals(expected, facts); } + @Test + public void addFactsCustomTriplePredicate_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, "mytriple"); + Model model = 
RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("mytriple", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + } diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java index 2bc4879f7..16b40036f 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.rdf; +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.*; import org.junit.Test; From 2d1f93f1e67694586087eb6b6bf61074f5c1bb06 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 15:53:55 +0200 Subject: [PATCH 1014/1255] support custom RDF load predicate --- .../commands/LoadCommandInterpreter.java | 29 ++++++++++---- .../commands/LoadCommandInterpreterTest.java | 38 +++++++++++++++++++ 2 files changed, 59 insertions(+), 8 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 099e29fe4..bec4713dc 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -68,14 +68,26 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (command.getArguments().size() > 0 && command.getArguments().get(0).fromTerm().isPresent() && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { task = Interpreter.extractNameArgument(command, 0, "task"); - Interpreter.validateArgumentCount(command, 2); pos++; } else { task = TASK_RLS; - Interpreter.validateArgumentCount(command, 1); } String fileName = Interpreter.extractStringArgument(command, pos, "filename"); + pos++; + + String rdfTriplePredicate = RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; + if (TASK_RDF.equals(task) && command.getArguments().size() > pos) { + if (command.getArguments().get(pos).fromTerm().isPresent() + && command.getArguments().get(pos).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + rdfTriplePredicate = command.getArguments().get(pos).fromTerm().get().getName(); + pos++; + } else { + throw new CommandExecutionException("Optional triple predicate name must be an IRI."); + } + } + + 
Interpreter.validateArgumentCount(command, pos); int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); @@ -85,7 +97,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } else if (TASK_OWL.equals(task)) { loadOwl(interpreter, fileName); } else if (TASK_RDF.equals(task)) { - loadRdf(interpreter, fileName); + loadRdf(interpreter, fileName, rdfTriplePredicate); } else { throw new CommandExecutionException( "Unknown task " + task + ". Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); @@ -137,7 +149,8 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); } - private void loadRdf(Interpreter interpreter, String fileName) throws CommandExecutionException { + private void loadRdf(Interpreter interpreter, String fileName, String triplePredicateName) + throws CommandExecutionException { try { String baseIri = new File(fileName).toURI().toString(); @@ -163,8 +176,7 @@ private void loadRdf(Interpreter interpreter, String fileName) throws CommandExe throw new CommandExecutionException(message); } - RdfModelConverter rdfModelConverter = new RdfModelConverter(true, - RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME); + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, triplePredicateName); rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); } catch (IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); @@ -182,12 +194,13 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " [TASK] \n" // + interpreter.printNormal("Usage: @" + commandName + " [TASK] [RDF predicate]\n" // + " file: path to the file to 
load\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // - + " RDF to load an RDF document and convert it to facts for predicate TRIPLE[3]\n"); + + " RDF to load an RDF document and convert it to facts\n" + + " RDF predicate: optional name of the predicate used for loading RDF triples (default: TRIPLE)\n"); } @Override diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index ded18aa69..7eae820a5 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -185,6 +185,26 @@ public void correctUseWithRdfTask_Nt_succeeds() assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } + + @Test + public void correctUseWithRdfTask_NtCustomPredicate_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/mytriple", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), 
interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } @Test public void correctUseWithRdfTask_Turtle_succeeds() @@ -256,6 +276,24 @@ public void wrongArgumentCount_fails() throws ParsingException, CommandExecution Command command = interpreter.parseCommand("@load ."); interpreter.runCommand(command); } + + @Test(expected = CommandExecutionException.class) + public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF \"file.nt\" \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongRdfPredicateArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF \"file.nt\" p(a) ."); + interpreter.runCommand(command); + } @Test(expected = CommandExecutionException.class) public void wrongArgumentType_fails() throws ParsingException, CommandExecutionException { From 0a4d23fcdc8c7e5f68f6f52f3b010265d6c9301a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 16:04:46 +0200 Subject: [PATCH 1015/1255] do not generate new prefix when setting same namespace twice --- .../MergingPrefixDeclarationRegistry.java | 11 ++++++++--- .../model/MergingPrefixDeclarationRegistryTest.java | 11 +++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 5dade8819..3413f19c4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -82,15 +82,20 @@ public void setBaseIri(String baseIri) { } /** - * Registers a prefix declaration. If prefixName is already registered, a - * freshly generated name will be used instead. + * Registers a prefix declaration. If prefixName is already registered for + * another IRI, a freshly generated name will be used instead. * * @param prefixName the name of the prefix. * @param prefixIri the IRI of the prefix. */ @Override public void setPrefixIri(String prefixName, String prefixIri) { - String name = prefixes.containsKey(prefixName) ? getFreshPrefix() : prefixName; + String name; + if (prefixes.containsKey(prefixName) && !prefixIri.equals(prefixes.get(prefixName))) { + name = getFreshPrefix(); + } else { + name = prefixName; + } prefixes.put(name, prefixIri); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 6705ba25d..cc46e3035 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -23,6 +23,8 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; +import java.util.stream.StreamSupport; + import org.junit.Before; import org.junit.Test; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; @@ -100,6 +102,15 @@ public void 
setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); + assertEquals(2, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); + } + + @Test + public void setPrefixIri_setSamePrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); + assertEquals(1, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } @Test From b1c9516bb2f4a3a24a1a94686780db7f34ec9117 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 16:48:29 +0200 Subject: [PATCH 1016/1255] support ABox style RDF loading --- .../model/api/PrefixDeclarationRegistry.java | 3 ++ .../rulewerk/rdf/RdfModelConverter.java | 39 ++++++++++---- .../rulewerk/rdf/RdfValueToTermConverter.java | 7 +++ .../data/input/test-turtle-type-weird.ttl | 4 ++ .../src/test/data/input/test-turtle-type.ttl | 4 ++ .../rulewerk/rdf/RdfModelConverterTest.java | 52 +++++++++++++++++++ .../rdf/RdfValueToTermConverterTest.java | 14 ++++- 7 files changed, 112 insertions(+), 11 deletions(-) create mode 100644 rulewerk-rdf/src/test/data/input/test-turtle-type-weird.ttl create mode 100644 rulewerk-rdf/src/test/data/input/test-turtle-type.ttl diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index fd82ade16..37d62280e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -44,6 +44,9 @@ public interface PrefixDeclarationRegistry extends Iterable . 
+@prefix rdf: . + +:a rdf:type "test"@de . diff --git a/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl b/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl new file mode 100644 index 000000000..53844257a --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl @@ -0,0 +1,4 @@ +@prefix : . +@prefix rdf: . + +:a rdf:type :c . diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java index 6135416f4..0ab610723 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java @@ -36,6 +36,7 @@ import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -98,4 +99,55 @@ public void addFactsCustomTriplePredicate_succeeds() assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); } + @Test + public void addFactsNoTriplePredicate_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("http://example.org/b", 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = 
Expressions.makeFact(predicate, terma, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + + @Test + public void addFactsNoTriplePredicateType_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle-type.ttl"), + RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("http://example.org/c", 1); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Fact fact = Expressions.makeFact(predicate, terma); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + + @Test + public void addFactsNoTriplePredicateTypeWeird_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle-type-weird.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate(PrefixDeclarationRegistry.RDF_TYPE, 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeLanguageStringConstant("test", "de"); + Fact fact = Expressions.makeFact(predicate, terma, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + } diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java index 16b40036f..29dab9abf 100644 --- 
a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java @@ -34,6 +34,7 @@ import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.TermType; @@ -52,6 +53,17 @@ public void convertUri_succeeds() { assertEquals("http://example.org", term.getName()); } + @Test + public void convertUriToPredicate_succeeds() { + URI uri = new URIImpl("http://example.org/mypred"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Predicate predicate = converter.convertUriToPredicate(uri, 2); + + assertEquals("http://example.org/mypred", predicate.getName()); + assertEquals(2, predicate.getArity()); + } + @Test public void convertLiteralDatatype_succeeds() { Literal literal = new LiteralImpl("42", new URIImpl("http://example.org/integer")); @@ -113,7 +125,7 @@ public void convertBNode_succeeds() { assertNotEquals("myid", term.getName()); } - @Test(expected=RulewerkRuntimeException.class) + @Test(expected = RulewerkRuntimeException.class) public void convertValueUnkownType_fails() { Value value = Mockito.mock(Value.class); From 285e53516775f1f0a8ac8febf53a1d53b225374a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 16:58:31 +0200 Subject: [PATCH 1017/1255] support ABox style RDF import --- .../commands/LoadCommandInterpreter.java | 5 ++++ .../commands/LoadCommandInterpreterTest.java | 28 ++++++++++++++++--- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index bec4713dc..63faf984f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -61,6 +61,8 @@ public class LoadCommandInterpreter implements CommandInterpreter { static final String TASK_OWL = "OWL"; static final String TASK_RDF = "RDF"; + static final String PREDICATE_ABOX = "ABOX"; + @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { String task; @@ -81,6 +83,9 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio if (command.getArguments().get(pos).fromTerm().isPresent() && command.getArguments().get(pos).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { rdfTriplePredicate = command.getArguments().get(pos).fromTerm().get().getName(); + if (PREDICATE_ABOX.equals(rdfTriplePredicate)) { // ABox-style import + rdfTriplePredicate = null; + } pos++; } else { throw new CommandExecutionException("Optional triple predicate name must be an IRI."); diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index 7eae820a5..ae4d121a2 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -185,7 +185,7 @@ public void correctUseWithRdfTask_Nt_succeeds() assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); } - + @Test public void 
correctUseWithRdfTask_NtCustomPredicate_succeeds() throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { @@ -198,7 +198,27 @@ public void correctUseWithRdfTask_NtCustomPredicate_succeeds() Term termc = Expressions.makeAbstractConstant("http://example.org/c"); Fact fact = Expressions.makeFact(predicate, terma, termb, termc); - Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + Command command = interpreter + .parseCommand("@load RDF 'src/test/data/loadtest.nt' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_NtABoxLoading_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("http://example.org/b", 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ABOX."); interpreter.runCommand(command); assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); @@ -276,7 +296,7 @@ public void wrongArgumentCount_fails() throws ParsingException, CommandExecution Command command = interpreter.parseCommand("@load ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); @@ -285,7 +305,7 
@@ public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandEx Command command = interpreter.parseCommand("@load RDF \"file.nt\" \"string\" ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void wrongRdfPredicateArgumentType_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); From e1b7cdb5e59e97eb04f1fdce0895a13baf15d925 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 26 Aug 2020 17:05:21 +0200 Subject: [PATCH 1018/1255] updated help --- .../rulewerk/commands/LoadCommandInterpreter.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 63faf984f..a5dd3196c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -200,12 +200,14 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S @Override public void printHelp(String commandName, Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] [RDF predicate]\n" // - + " file: path to the file to load\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // - + " RDF to load an RDF document and convert it to facts\n" - + " RDF predicate: optional name of the predicate used for loading RDF triples (default: TRIPLE)\n"); + + " RDF to load an RDF document and convert it to facts\n" // + + " file: path to the file to load\n" // + + " RDF predicate: optional name of the predicate used for loading RDF\n" // + + " triples (default: 
TRIPLE); use ABOX to load triples\n" // + + " like OWL assertions, using unary and binary predicates\n"); } @Override From 80c9eb25ea333ae728d044256ecf1b134a9c8e89 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 26 Aug 2020 17:09:03 +0200 Subject: [PATCH 1019/1255] fix add exit command to completer --- .../client/shell/InteractiveShell.java | 6 +- .../rulewerk/client/shell/Shell.java | 49 +++++++----- .../rulewerk/client/shell/ShellTest.java | 75 +++++++++++++++++++ 3 files changed, 108 insertions(+), 22 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 478299580..e07c0fc96 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -48,14 +48,12 @@ public static void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); final Interpreter interpreter = initializeInterpreter(terminal); - + final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); - final Shell shell = new Shell(lineReader, promptProvider, interpreter); - shell.run(); - + shell.run(lineReader, promptProvider); } static Interpreter initializeInterpreter(final Terminal terminal) { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 7d67734d7..98c0a3345 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -3,6 +3,7 @@ import org.jline.reader.EndOfFileException; import org.jline.reader.LineReader; import org.jline.reader.UserInterruptException; +import org.jline.terminal.Terminal; import org.jline.utils.AttributedString; /*- @@ -36,30 +37,30 @@ public class Shell { private final Interpreter interpreter; - private final LineReader lineReader; - private final AttributedString prompt; boolean running; - public Shell(final LineReader lineReader, final AttributedString prompt, final Interpreter interpreter) { - this.lineReader = lineReader; - this.prompt = prompt; + public Shell(final Interpreter interpreter) { this.interpreter = interpreter; + this.registerExitCommand(); + } + + private void registerExitCommand() { final CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); for (final ExitCommandName exitCommandName : ExitCommandName.values()) { - interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); + this.interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); } } - public void run() { + public void run(final LineReader lineReader, final AttributedString prompt) { this.printWelcome(); this.running = true; while (this.running) { final Command command; try { - command = this.readCommand(); + command = this.readCommand(lineReader, prompt); } catch (final Exception e) { this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); e.printStackTrace(); @@ -85,10 +86,10 @@ public void run() { * * @return command or null */ - public Command readCommand() { + public Command readCommand(final LineReader lineReader, final AttributedString prompt) { String readLine; try { - readLine = this.lineReader.readLine(this.prompt.toAnsi(this.lineReader.getTerminal())); + readLine = this.readLine(lineReader, prompt); } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // 
Exit request from user CTRL+C @@ -102,16 +103,10 @@ public Command readCommand() { } - readLine = readLine.trim(); - if ("".equals(readLine)) { + readLine = this.processReadLine(readLine); + if (readLine.isEmpty()) { return null; } - if (readLine.charAt(0) != '@') { - readLine = "@" + readLine; - } - if (readLine.charAt(readLine.length() - 1) != '.') { - readLine = readLine + " ."; - } try { return this.interpreter.parseCommand(readLine); @@ -121,6 +116,24 @@ public Command readCommand() { } } + private String readLine(final LineReader lineReader, final AttributedString prompt) { + final Terminal terminal = lineReader.getTerminal(); + return lineReader.readLine(prompt.toAnsi(terminal)); + } + + String processReadLine(final String readLine) { + String result = readLine.trim(); + if (!result.isEmpty()) { + if (readLine.charAt(0) != '@') { + result = "@" + readLine; + } + if (readLine.charAt(readLine.length() - 1) != '.') { + result = readLine + " ."; + } + } + return result; + } + public void exitShell() { this.running = false; } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java new file mode 100644 index 000000000..321cc32a8 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -0,0 +1,75 @@ +//package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +// +//import static org.junit.Assert.assertNull; +// +//import java.io.PrintWriter; +// +//import org.jline.reader.LineReader; +//import org.jline.terminal.Terminal; +//import org.jline.utils.AttributedString; +//import org.junit.Test; +//import org.mockito.Mockito; +//import org.semanticweb.rulewerk.commands.Interpreter; +//import org.semanticweb.rulewerk.core.model.api.Command; +//import org.semanticweb.rulewerk.core.reasoner.Reasoner; +//import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +//import org.semanticweb.rulewerk.parser.ParserConfiguration; +// +//public class ShellTest { +// +// @Test +// public void testProcessLineEmpty() { +// final Terminal terminalMock = Mockito.mock(Terminal.class); +// final Interpreter interpreter = getMockInterpreter(terminalMock); +// DefaultConfiguration.buildLineReader(terminalMock, interpreter); +// final LineReader lineReaderMock = Mockito.mock(LineReader.class); +// final AttributedString prompt = Mockito.mock(AttributedString.class); +// final Shell shell = new Shell(lineReaderMock, prompt, interpreter); +// +// Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(""); +// +// final Command readCommand = shell.readCommand(); +// assertNull(readCommand); +// } +// +// +// static public Interpreter getMockInterpreter(final Terminal terminal) { +// final Reasoner reasonerMock = Mockito.mock(Reasoner.class); +// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); +// +// final Interpreter interpreter = new Interpreter(reasonerMock, new TerminalStyledPrinter(terminal), +// parserConfiguration); +// +// final PrintWriter printWriter = Mockito.mock(PrintWriter.class); +// Mockito.when(terminal.writer()).thenReturn(printWriter); +//// +//// // final TerminalStyledPrinter printer = new TerminalStyledPrinter(writer); +//// final ParserConfiguration 
parserConfiguration = new DefaultParserConfiguration(); +//// final KnowledgeBase knowledgeBase = new KnowledgeBase(); +// +//// Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); +//// return new Interpreter(reasoner, printer, parserConfiguration); +// return interpreter; +// } +// +//} From 206c6a2f38ac32d1b1c8a060ac84c2077f5a082b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 26 Aug 2020 22:58:28 +0200 Subject: [PATCH 1020/1255] create prompt only once --- .../rulewerk/client/picocli/Main.java | 2 +- .../client/shell/DefaultConfiguration.java | 14 ++-- .../client/shell/InteractiveShell.java | 15 ++--- .../rulewerk/client/shell/Shell.java | 23 +++---- .../rulewerk/client/shell/ShellTest.java | 49 +++++++------- .../commands/ExitCommandInterpreterTest.java | 66 +++++++++++++++++++ 6 files changed, 114 insertions(+), 55 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 15a0d259f..409bd3a5b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -46,7 +46,7 @@ public static void main(final String[] args) throws IOException { configureLogging(); if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { - InteractiveShell.run(); + new InteractiveShell().run(); } else { if (args[0].equals("materialize")) { final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index c53ad76be..76d8b7c3e 100644 --- 
a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -43,11 +43,6 @@ public final class DefaultConfiguration { private DefaultConfiguration() { } - public static AttributedString buildPromptProvider() { - return new AttributedString("rulewerk>", AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW)); - } - - public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) @@ -85,4 +80,13 @@ public static Terminal buildTerminal() throws IOException { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); } + public static String buildPrompt(final Terminal terminal) { + return buildPromptProvider().toAnsi(terminal); + } + + public static AttributedString buildPromptProvider() { + final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); + return new AttributedString("rulewerk>", promptStyle); + } + } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index e07c0fc96..88ca015bf 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -24,7 +24,6 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; -import org.jline.utils.AttributedString; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -39,24 +38,20 @@ public class InteractiveShell 
//implements Runnable { - public static void main(final String[] args) throws IOException { - run(); - } - // @Override - public static void run() throws IOException { + public void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = initializeInterpreter(terminal); + final Interpreter interpreter = this.initializeInterpreter(terminal); final Shell shell = new Shell(interpreter); final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final AttributedString promptProvider = DefaultConfiguration.buildPromptProvider(); + final String prompt = DefaultConfiguration.buildPrompt(terminal); - shell.run(lineReader, promptProvider); + shell.run(lineReader, prompt); } - static Interpreter initializeInterpreter(final Terminal terminal) { + Interpreter initializeInterpreter(final Terminal terminal) { final KnowledgeBase knowledgeBase = new KnowledgeBase(); final Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 98c0a3345..4cc99d7ad 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -3,8 +3,6 @@ import org.jline.reader.EndOfFileException; import org.jline.reader.LineReader; import org.jline.reader.UserInterruptException; -import org.jline.terminal.Terminal; -import org.jline.utils.AttributedString; /*- * #%L @@ -53,7 +51,7 @@ private void registerExitCommand() { } } - public void run(final LineReader lineReader, final AttributedString prompt) { + public void run(final LineReader lineReader, final String prompt) { this.printWelcome(); this.running = true; @@ -84,12 +82,14 @@ public 
void run(final LineReader lineReader, final AttributedString prompt) { * made to interpret mistyped commands by adding @ and . before and after the * input, if forgotten. * + * @param prompt + * * @return command or null */ - public Command readCommand(final LineReader lineReader, final AttributedString prompt) { + public Command readCommand(final LineReader lineReader, final String prompt) { String readLine; try { - readLine = this.readLine(lineReader, prompt); + readLine = lineReader.readLine(prompt); } catch (final UserInterruptException e) { if (e.getPartialLine().isEmpty()) { // Exit request from user CTRL+C @@ -116,19 +116,14 @@ public Command readCommand(final LineReader lineReader, final AttributedString p } } - private String readLine(final LineReader lineReader, final AttributedString prompt) { - final Terminal terminal = lineReader.getTerminal(); - return lineReader.readLine(prompt.toAnsi(terminal)); - } - String processReadLine(final String readLine) { String result = readLine.trim(); if (!result.isEmpty()) { - if (readLine.charAt(0) != '@') { - result = "@" + readLine; + if (result.charAt(0) != '@') { + result = "@" + result; } - if (readLine.charAt(readLine.length() - 1) != '.') { - result = readLine + " ."; + if (result.charAt(result.length() - 1) != '.') { + result = result + " ."; } } return result; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 321cc32a8..9cd2355de 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,24 +1,24 @@ //package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file 
except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ +// +///*- +// * #%L +// * Rulewerk Client +// * %% +// * Copyright (C) 2018 - 2020 Rulewerk Developers +// * %% +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. 
+// * #L% +// */ // //import static org.junit.Assert.assertNull; // @@ -43,16 +43,15 @@ // final Interpreter interpreter = getMockInterpreter(terminalMock); // DefaultConfiguration.buildLineReader(terminalMock, interpreter); // final LineReader lineReaderMock = Mockito.mock(LineReader.class); -// final AttributedString prompt = Mockito.mock(AttributedString.class); -// final Shell shell = new Shell(lineReaderMock, prompt, interpreter); +// final AttributedString prompt = Mockito.mock(AttributedString.class); +// final Shell shell = new Shell(interpreter); // -// Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(""); +// Mockito.when(lineReaderMock.readLine(Mockito.anyString())).thenReturn(""); // -// final Command readCommand = shell.readCommand(); +// final Command readCommand = shell.readCommand(lineReaderMock, prompt); // assertNull(readCommand); // } // -// // static public Interpreter getMockInterpreter(final Terminal terminal) { // final Reasoner reasonerMock = Mockito.mock(Reasoner.class); // final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java new file mode 100644 index 000000000..3590b52d3 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -0,0 +1,66 @@ +//package org.semanticweb.rulewerk.client.shell.commands; +// +///*- +// * #%L +// * Rulewerk Client +// * %% +// * Copyright (C) 2018 - 2020 Rulewerk Developers +// * %% +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. 
+// * You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// * #L% +// */ +// +//import static org.junit.Assert.assertTrue; +// +//import java.io.StringWriter; +// +//import org.junit.Test; +//import org.mockito.Mockito; +//import org.semanticweb.rulewerk.client.shell.Shell; +//import org.semanticweb.rulewerk.commands.CommandExecutionException; +//import org.semanticweb.rulewerk.commands.CommandInterpreter; +//import org.semanticweb.rulewerk.commands.Interpreter; +//import org.semanticweb.rulewerk.commands.InterpreterTest; +//import org.semanticweb.rulewerk.parser.ParsingException; +// +//public class ExitCommandInterpreterTest { +// +// @Test +// public void help_succeeds() throws ParsingException, CommandExecutionException { +// final StringWriter writer = new StringWriter(); +// final Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); +//// final CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); +//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); +// +// final Shell shellMock = Mockito.mock(Shell.class); +// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); +// commandInterpreter.printHelp("commandname", interpreter); +// +// commandInterpreter.printHelp("commandname", interpreter); +// final String result = writer.toString(); +// +// assertTrue(result.startsWith("Usage: @commandname ")); +// assertTrue(result.endsWith("\n")); +// +//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); +// } +// +// @Test +// public void synopsis_succeeds() throws 
ParsingException, CommandExecutionException { +// final Shell shellMock = Mockito.mock(Shell.class); +// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); +// final String synopsis = commandInterpreter.getSynopsis(); +// assertTrue(synopsis.length() < 70); +// } +// +//} From 068a985db8d379798085625bdd472772426f3409 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 00:07:01 +0200 Subject: [PATCH 1021/1255] some unit tests Shell readCommand and ExitCommandInterpreter --- .../rulewerk/client/shell/Shell.java | 1 - .../commands/ExitCommandInterpreter.java | 13 +- .../rulewerk/client/shell/ShellTest.java | 138 +++++++++------ .../commands/ExitCommandInterpreterTest.java | 162 +++++++++++------- 4 files changed, 182 insertions(+), 132 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 4cc99d7ad..7f56b3f22 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -100,7 +100,6 @@ public Command readCommand(final LineReader lineReader, final String prompt) { } catch (final EndOfFileException e) { // Exit request from user CTRL+D return ExitCommandInterpreter.EXIT_COMMAND; - } readLine = this.processReadLine(readLine); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index ea2645279..85f4edb6d 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -34,25 +34,16 @@ public class ExitCommandInterpreter implements 
CommandInterpreter { public static enum ExitCommandName { exit; - - public static boolean isExitCommand(final String commandName) { - for (final ExitCommandName name : values()) { - if (name.toString().equals(commandName)) { - return true; - } - } - return false; - } } final Shell shell; - public ExitCommandInterpreter(Shell shell) { + public ExitCommandInterpreter(final Shell shell) { this.shell = shell; } @Override - public void printHelp(final String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: " + commandName + ".\n"); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 9cd2355de..bb3e8a02e 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,56 +1,84 @@ -//package org.semanticweb.rulewerk.client.shell; -// -///*- -// * #%L -// * Rulewerk Client -// * %% -// * Copyright (C) 2018 - 2020 Rulewerk Developers -// * %% -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. 
-// * #L% -// */ -// -//import static org.junit.Assert.assertNull; -// -//import java.io.PrintWriter; -// -//import org.jline.reader.LineReader; -//import org.jline.terminal.Terminal; -//import org.jline.utils.AttributedString; -//import org.junit.Test; -//import org.mockito.Mockito; -//import org.semanticweb.rulewerk.commands.Interpreter; -//import org.semanticweb.rulewerk.core.model.api.Command; -//import org.semanticweb.rulewerk.core.reasoner.Reasoner; -//import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -//import org.semanticweb.rulewerk.parser.ParserConfiguration; -// -//public class ShellTest { -// -// @Test -// public void testProcessLineEmpty() { -// final Terminal terminalMock = Mockito.mock(Terminal.class); -// final Interpreter interpreter = getMockInterpreter(terminalMock); -// DefaultConfiguration.buildLineReader(terminalMock, interpreter); -// final LineReader lineReaderMock = Mockito.mock(LineReader.class); -// final AttributedString prompt = Mockito.mock(AttributedString.class); -// final Shell shell = new Shell(interpreter); -// -// Mockito.when(lineReaderMock.readLine(Mockito.anyString())).thenReturn(""); -// -// final Command readCommand = shell.readCommand(lineReaderMock, prompt); -// assertNull(readCommand); -// } +package org.semanticweb.rulewerk.client.shell; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.jline.reader.LineReader; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; + + +public class ShellTest { + + @Test + public void processReadLine_Blank() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" "); + assertEquals("", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAt() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final 
String processedReadLine = shell.processReadLine(" @ "); + assertEquals("@ .", processedReadLine); + } + + @Test + public void processReadLine_EndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" . "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAtEndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @. "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" .@ "); + assertEquals("@.@ .", processedReadLine); + } + + @Test + public void readCommand_Blank() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was not called + // TODO test exceptions have not been thrown + } + + @Test + public void readCommand_Invalid() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was called + // TODO test Parsing exception has been thrown + } + // // static public Interpreter getMockInterpreter(final Terminal terminal) { // final Reasoner reasonerMock = Mockito.mock(Reasoner.class); @@ -70,5 +98,5 @@ //// return 
new Interpreter(reasoner, printer, parserConfiguration); // return interpreter; // } -// -//} + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index 3590b52d3..b442817b8 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -1,66 +1,98 @@ -//package org.semanticweb.rulewerk.client.shell.commands; -// -///*- -// * #%L -// * Rulewerk Client -// * %% -// * Copyright (C) 2018 - 2020 Rulewerk Developers -// * %% -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. 
-// * #L% -// */ -// -//import static org.junit.Assert.assertTrue; -// -//import java.io.StringWriter; -// -//import org.junit.Test; -//import org.mockito.Mockito; -//import org.semanticweb.rulewerk.client.shell.Shell; -//import org.semanticweb.rulewerk.commands.CommandExecutionException; -//import org.semanticweb.rulewerk.commands.CommandInterpreter; -//import org.semanticweb.rulewerk.commands.Interpreter; -//import org.semanticweb.rulewerk.commands.InterpreterTest; -//import org.semanticweb.rulewerk.parser.ParsingException; -// -//public class ExitCommandInterpreterTest { -// -// @Test -// public void help_succeeds() throws ParsingException, CommandExecutionException { -// final StringWriter writer = new StringWriter(); -// final Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); -//// final CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter(); -//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); -// -// final Shell shellMock = Mockito.mock(Shell.class); -// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); -// commandInterpreter.printHelp("commandname", interpreter); -// -// commandInterpreter.printHelp("commandname", interpreter); -// final String result = writer.toString(); -// -// assertTrue(result.startsWith("Usage: @commandname ")); -// assertTrue(result.endsWith("\n")); -// -//// InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); -// } -// -// @Test -// public void synopsis_succeeds() throws ParsingException, CommandExecutionException { -// final Shell shellMock = Mockito.mock(Shell.class); -// final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); -// final String synopsis = commandInterpreter.getSynopsis(); -// assertTrue(synopsis.length() < 70); -// } -// +package org.semanticweb.rulewerk.client.shell.commands; + +import static org.junit.Assert.assertEquals; + +/*- + * #%L + * Rulewerk Client + * %% + 
* Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.Shell; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.commands.SimpleStyledPrinter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ExitCommandInterpreterTest { + + @Test + public void exitShell_succeeds() throws CommandExecutionException { + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + final Shell shellSpy = Mockito.spy(shell); + final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellSpy); + + commandInterpreter.run(Mockito.mock(Command.class), interpreterMock); + + Mockito.verify(shellSpy).exitShell(); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + final Shell 
shellMock = Mockito.mock(Shell.class); + final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final StringWriter writer = new StringWriter(); + final Interpreter interpreter = getMockInterpreter(writer); + + final Interpreter interpreterSpy = Mockito.spy(interpreter); + commandInterpreter.printHelp("commandname", interpreterSpy); + + Mockito.verify(interpreterSpy).printNormal("Usage: commandname.\n"); + + final String result = writer.toString(); + assertEquals("Usage: commandname.\n", result); + + // TODO what about testing printing to terminal? + // TODO establish test scope + } + +// static public Interpreter getMockTerminalInterpreter(final Terminal terminal) { +// final StyledPrinter printer = new TerminalStyledPrinter(terminal); +// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); +// final Reasoner reasoner = Mockito.mock(Reasoner.class); +// return new Interpreter(reasoner, printer, parserConfiguration); //} + + static public Interpreter getMockInterpreter(final Writer writer) { + final SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Reasoner reasoner = Mockito.mock(Reasoner.class); + return new Interpreter(reasoner, printer, parserConfiguration); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + final Shell shellMock = Mockito.mock(Shell.class); + final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final String synopsis = commandInterpreter.getSynopsis(); + assertTrue(synopsis.length() < 70); + } + +} From c857ab0c10f28e09a163df0ea981d1dbbc72eca8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 00:26:52 +0200 Subject: [PATCH 1022/1255] unit test read exit command --- .../rulewerk/client/shell/ShellTest.java | 64 +++++++++++++------ 1 file changed, 44 insertions(+), 20 deletions(-) 
diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index bb3e8a02e..4fae876ff 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,14 +1,22 @@ package org.semanticweb.rulewerk.client.shell; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import java.io.PrintWriter; + import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; - +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; public class ShellTest { @@ -79,24 +87,40 @@ public void readCommand_Invalid() { // TODO test Parsing exception has been thrown } -// -// static public Interpreter getMockInterpreter(final Terminal terminal) { -// final Reasoner reasonerMock = Mockito.mock(Reasoner.class); -// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); -// -// final Interpreter interpreter = new Interpreter(reasonerMock, new TerminalStyledPrinter(terminal), -// parserConfiguration); -// -// final PrintWriter printWriter = Mockito.mock(PrintWriter.class); -// Mockito.when(terminal.writer()).thenReturn(printWriter); -//// -//// // final TerminalStyledPrinter printer = new TerminalStyledPrinter(writer); -//// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); -//// final 
KnowledgeBase knowledgeBase = new KnowledgeBase(); -// -//// Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); -//// return new Interpreter(reasoner, printer, parserConfiguration); -// return interpreter; -// } + @Test + public void readCommand_Exit() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + // TODO need real interpreter here + final Shell shell = new Shell(getMockInterpreter()); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + + // TODO test Parsing exception has not been thrown + // TODO test ExitCommandInterpreter.run() has been called + + assertFalse(shell.running); + } + + static public Interpreter getMockInterpreter() { + final Terminal terminal = Mockito.mock(Terminal.class); + final Reasoner reasoner = Mockito.mock(Reasoner.class); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + + final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), + parserConfiguration); + + final PrintWriter printWriter = Mockito.mock(PrintWriter.class); + Mockito.when(terminal.writer()).thenReturn(printWriter); + + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + + return interpreter; + } } From e68cf46b2156232656f5dbbfaa409619ce5dd678 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 09:57:09 +0200 Subject: [PATCH 1023/1255] more meaningful exceptions --- .../java/org/semanticweb/rulewerk/parser/RuleParser.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index 
c9a00c103..7043d92c4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -181,8 +181,8 @@ static T parseSyntaxFragment(final String input, SyntaxFragme result = parserAction.parse(localParser); localParser.ensureEndOfInput(); } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { - LOGGER.error("Exception while parsing " + syntaxFragmentType + ": {}!", input); - throw new ParsingException("Exception while parsing " + syntaxFragmentType, e); + LOGGER.error("Error parsing " + syntaxFragmentType + ": {}!", input); + throw new ParsingException("Error parsing " + syntaxFragmentType + ": " + e.getMessage(), e); } return result; } @@ -257,8 +257,8 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException try { parser.parse(); } catch (ParseException | PrefixDeclarationException | TokenMgrError e) { - LOGGER.error("Exception while parsing Knowledge Base!", e); - throw new ParsingException("Exception while parsing Knowledge Base.", e); + LOGGER.error("Error parsing Knowledge Base: " + e.getMessage(), e); + throw new ParsingException(e.getMessage(), e); } KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); From 4db3b7df1a13f69595e9a808cea38c49e8d7293c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 11:01:37 +0200 Subject: [PATCH 1024/1255] allow interpreter to make reasoner; new clear command --- .../client/shell/InteractiveShell.java | 19 +- .../rulewerk/client/shell/ShellTest.java | 271 ++++++++++-------- .../commands/ExitCommandInterpreterTest.java | 4 +- .../commands/ClearCommandInterpreter.java | 57 ++++ .../rulewerk/commands/Interpreter.java | 67 ++++- .../commands/ClearCommandInterpreterTest.java | 130 +++++++++ .../rulewerk/commands/InterpreterTest.java | 82 +++--- 7 files changed, 447 insertions(+), 183 deletions(-) create mode 100644 
rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java create mode 100644 rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java index 88ca015bf..0eaa6dde1 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java @@ -25,8 +25,6 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; @@ -42,20 +40,21 @@ public class InteractiveShell public void run() throws IOException { final Terminal terminal = DefaultConfiguration.buildTerminal(); - final Interpreter interpreter = this.initializeInterpreter(terminal); - final Shell shell = new Shell(interpreter); - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final String prompt = DefaultConfiguration.buildPrompt(terminal); + try (Interpreter interpreter = this.initializeInterpreter(terminal)) { + final Shell shell = new Shell(interpreter); - shell.run(lineReader, prompt); + final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); + final String prompt = DefaultConfiguration.buildPrompt(terminal); + + shell.run(lineReader, prompt); + } } Interpreter initializeInterpreter(final Terminal terminal) { - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - final 
Reasoner reasoner = new VLogReasoner(knowledgeBase); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), + final Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, + (knowledgeBase) -> new VLogReasoner(knowledgeBase), new TerminalStyledPrinter(terminal), parserConfiguration); return interpreter; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 4fae876ff..0393a18a0 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,126 +1,145 @@ -package org.semanticweb.rulewerk.client.shell; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; - -import java.io.PrintWriter; - -import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; -import org.junit.Test; -import org.mockito.Mockito; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; - -public class ShellTest { - - @Test - public void processReadLine_Blank() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" "); - assertEquals("", processedReadLine); - } - - @Test - public void processReadLine_StartsWithAt() { - final Shell shell = new 
Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @ "); - assertEquals("@ .", processedReadLine); - } - - @Test - public void processReadLine_EndsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" . "); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_StartsWithAtEndsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @. "); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" .@ "); - assertEquals("@.@ .", processedReadLine); - } - - @Test - public void readCommand_Blank() { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); - - final Command command = shell.readCommand(lineReaderMock, prompt); - assertNull(command); - - // TODO test interpreter.parseCommand was not called - // TODO test exceptions have not been thrown - } - - @Test - public void readCommand_Invalid() { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); - - final Command command = shell.readCommand(lineReaderMock, prompt); - assertNull(command); - - // TODO test interpreter.parseCommand was called - // TODO test Parsing exception has been thrown - } - - @Test - public void readCommand_Exit() { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final 
String prompt = "myPrompt"; - // TODO need real interpreter here - final Shell shell = new Shell(getMockInterpreter()); - - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); - - final Command command = shell.readCommand(lineReaderMock, prompt); - assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); - - // TODO test Parsing exception has not been thrown - // TODO test ExitCommandInterpreter.run() has been called - - assertFalse(shell.running); - } - - static public Interpreter getMockInterpreter() { - final Terminal terminal = Mockito.mock(Terminal.class); - final Reasoner reasoner = Mockito.mock(Reasoner.class); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - - final Interpreter interpreter = new Interpreter(reasoner, new TerminalStyledPrinter(terminal), - parserConfiguration); - - final PrintWriter printWriter = Mockito.mock(PrintWriter.class); - Mockito.when(terminal.writer()).thenReturn(printWriter); - - final KnowledgeBase knowledgeBase = new KnowledgeBase(); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - - return interpreter; - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; + +import java.io.PrintWriter; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; + +public class ShellTest { + + @Test + public void processReadLine_Blank() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" "); + assertEquals("", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAt() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @ "); + assertEquals("@ .", processedReadLine); + } + + @Test + public void processReadLine_EndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" . "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_StartsWithAtEndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @. 
"); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" .@ "); + assertEquals("@.@ .", processedReadLine); + } + + @Test + public void readCommand_Blank() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was not called + // TODO test exceptions have not been thrown + } + + @Test + public void readCommand_Invalid() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertNull(command); + + // TODO test interpreter.parseCommand was called + // TODO test Parsing exception has been thrown + } + + @Test + public void readCommand_Exit() { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + // TODO need real interpreter here + final Shell shell = new Shell(getMockInterpreter()); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + + final Command command = shell.readCommand(lineReaderMock, prompt); + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + + // TODO test Parsing exception has not been thrown + // TODO test ExitCommandInterpreter.run() has been called + + assertFalse(shell.running); + } + + static public Interpreter getMockInterpreter() { + final Terminal terminal = 
Mockito.mock(Terminal.class); + final PrintWriter printWriter = Mockito.mock(PrintWriter.class); + Mockito.when(terminal.writer()).thenReturn(printWriter); + + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + + final Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> { + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(kb); + return reasoner; + }, new TerminalStyledPrinter(terminal), parserConfiguration); + + return interpreter; + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index b442817b8..f6169e80b 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -83,8 +83,8 @@ public void help_succeeds() throws ParsingException, CommandExecutionException { static public Interpreter getMockInterpreter(final Writer writer) { final SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - final Reasoner reasoner = Mockito.mock(Reasoner.class); - return new Interpreter(reasoner, printer, parserConfiguration); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> Mockito.mock(Reasoner.class), printer, + parserConfiguration); } @Test diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java new file mode 100644 index 000000000..421f1d5b2 --- /dev/null +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -0,0 +1,57 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; + +public class ClearCommandInterpreter implements CommandInterpreter { + + static final String TASK_ALL = "ALL"; + static final String TASK_INFERENCES = "INF"; + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 1); + String task = Interpreter.extractNameArgument(command, 0, "task"); + if (TASK_ALL.equals(task)) { + interpreter.clearReasonerAndKnowledgeBase(); + interpreter.printNormal("Knowledge has been cleared; reasoner has been completely reset.\n"); + } else if (TASK_INFERENCES.equals(task)) { + interpreter.getReasoner().resetReasoner(); + interpreter.printNormal("Reasoner has been reset.\n"); + } else { + throw new CommandExecutionException( + "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES); + } + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " TASK\n" + // + " TASK: what to reset, ALL (knowledge base), INF (inferences)\n"); + 
} + + @Override + public String getSynopsis() { + return "discards the knowledge base and/or previously computed inferences"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index d75e8e235..447a50ca2 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -4,6 +4,7 @@ import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; import java.io.Writer; @@ -35,6 +36,7 @@ import java.util.Set; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; @@ -46,17 +48,39 @@ import org.semanticweb.rulewerk.parser.javacc.ParseException; import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; -public class Interpreter { +public class Interpreter implements AutoCloseable { - final Reasoner reasoner; + @FunctionalInterface + public interface ReasonerProvider { + public Reasoner reasoner(KnowledgeBase knowledgeBase); + } + + @FunctionalInterface + public interface KnowledgeBaseProvider { + public KnowledgeBase knowledgeBase(); + } + + final public static KnowledgeBaseProvider EMPTY_KNOWLEDGE_BASE_PROVIDER = new KnowledgeBaseProvider() { + @Override + public KnowledgeBase knowledgeBase() { + return new KnowledgeBase(); + } + }; + + final ReasonerProvider reasonerProvider; + final KnowledgeBaseProvider knowledgeBaseProvider; + + Reasoner reasoner = null; final StyledPrinter printer; final ParserConfiguration 
parserConfiguration; final LinkedHashMap commandInterpreters = new LinkedHashMap<>(); - public Interpreter(final Reasoner reasoner, final StyledPrinter printer, - final ParserConfiguration parserConfiguration) { - this.reasoner = reasoner; + public Interpreter(final KnowledgeBaseProvider knowledgeBaseProvider, final ReasonerProvider reasonerProvider, + final StyledPrinter printer, final ParserConfiguration parserConfiguration) { + this.knowledgeBaseProvider = knowledgeBaseProvider; + this.reasonerProvider = reasonerProvider; + clearReasonerAndKnowledgeBase(); this.printer = printer; this.parserConfiguration = parserConfiguration; this.registerDefaultCommandInterpreters(); @@ -157,6 +181,7 @@ private void registerDefaultCommandInterpreters() { this.registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); this.registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); this.registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + this.registerCommandInterpreter("clear", new ClearCommandInterpreter()); this.registerCommandInterpreter("reason", new ReasonCommandInterpreter()); this.registerCommandInterpreter("query", new QueryCommandInterpreter()); this.registerCommandInterpreter("export", new ExportCommandInterpreter()); @@ -231,4 +256,36 @@ public InputStream getFileInputStream(String fileName) throws FileNotFoundExcept return new FileInputStream(fileName); } + /** + * Completely resets the reasoner and knowledge base. All inferences and + * statements are cleared. + */ + public void clearReasonerAndKnowledgeBase() { + closeReasoner(); + reasoner = reasonerProvider.reasoner(knowledgeBaseProvider.knowledgeBase()); + try { + reasoner.reason(); + } catch (IOException e) { + throw new RulewerkRuntimeException("Failed to initialise reasoner: " + e.getMessage(), e); + } + } + + /** + * Frees all resources, especially those associated with reasoning. 
+ */ + @Override + public void close() { + closeReasoner(); + } + + /** + * Closes and discards the internal {@link Reasoner}. + */ + private void closeReasoner() { + if (reasoner != null) { + reasoner.close(); + reasoner = null; + } + } + } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java new file mode 100644 index 000000000..3baf1df69 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -0,0 +1,130 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.Arrays; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ClearCommandInterpreterTest { + + @Test + public void correctUseAll_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = Mockito.spy(InterpreterTest.getMockInterpreter(writer)); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + assertEquals(1, interpreter.getKnowledgeBase().getFacts().size()); + + Command command = interpreter.parseCommand("@clear ALL ."); + interpreter.runCommand(command); + + assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + Mockito.verify(interpreter).clearReasonerAndKnowledgeBase(); + } + + @Test + public void correctUseInf_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + 
ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = Mockito.spy(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, + parserConfiguration)) { + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@clear INF ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + Mockito.verify(reasoner).resetReasoner(); + } + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@clear ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@clear \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void unkonwnTask_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command 
command = interpreter.parseCommand("@clear UNKNOWNTASK ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ClearCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ClearCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java index 74e517e33..443ed8baa 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java @@ -40,10 +40,11 @@ public class InterpreterTest { static public Interpreter getMockInterpreter(Writer writer) { SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - KnowledgeBase knowledgeBase = new KnowledgeBase(); - Reasoner reasoner = Mockito.mock(Reasoner.class); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - return new Interpreter(reasoner, printer, parserConfiguration); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, printer, parserConfiguration); } /** @@ -73,45 +74,47 @@ public void getters_succeed() { StringWriter writer = new StringWriter(); SimpleStyledPrinter printer = 
new SimpleStyledPrinter(writer); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - KnowledgeBase knowledgeBase = new KnowledgeBase(); - Reasoner reasoner = Mockito.mock(Reasoner.class); + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = Mockito.mock(Reasoner.class); Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); - - assertEquals(knowledgeBase, interpreter.getKnowledgeBase()); - assertEquals(reasoner, interpreter.getReasoner()); - assertEquals(writer, interpreter.getWriter()); - assertEquals(parserConfiguration, interpreter.getParserConfiguration()); + try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, + parserConfiguration)) { + assertEquals(knowledgeBase, interpreter.getKnowledgeBase()); + assertEquals(reasoner, interpreter.getReasoner()); + assertEquals(writer, interpreter.getWriter()); + assertEquals(parserConfiguration, interpreter.getParserConfiguration()); + } } @Test(expected = CommandExecutionException.class) public void unknownCommand_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); - Interpreter interpreter = getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@unknown ."); - interpreter.runCommand(command); + try (Interpreter interpreter = getMockInterpreter(writer)) { + Command command = interpreter.parseCommand("@unknown ."); + interpreter.runCommand(command); + } } @Test(expected = ParsingException.class) public void malformedCommand_fails() throws ParsingException { StringWriter writer = new StringWriter(); - Interpreter interpreter = getMockInterpreter(writer); - - interpreter.parseCommand("malformed ."); + try (Interpreter interpreter = getMockInterpreter(writer)) { + interpreter.parseCommand("malformed ."); + } } @Test public void prefixesAreUsed_succeeds() 
throws ParsingException, PrefixDeclarationException { StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); - Command command = interpreter.parseCommand("@somecommand eg:test ."); + Command command = interpreter.parseCommand("@somecommand eg:test ."); - assertEquals(1, command.getArguments().size()); - assertTrue(command.getArguments().get(0).fromTerm().isPresent()); - assertEquals("http://example.org/test", command.getArguments().get(0).fromTerm().get().getName()); + assertEquals(1, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertEquals("http://example.org/test", command.getArguments().get(0).fromTerm().get().getName()); + } } @Test @@ -119,21 +122,20 @@ public void print_succeeds() { StringWriter writer = new StringWriter(); SimpleStyledPrinter printer = Mockito.spy(new SimpleStyledPrinter(writer)); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - Reasoner reasoner = Mockito.mock(Reasoner.class); - Interpreter interpreter = new Interpreter(reasoner, printer, parserConfiguration); - - interpreter.printCode("Code"); - interpreter.printNormal("Normal"); - interpreter.printEmph("Emph"); - interpreter.printSection("Section"); - interpreter.printImportant("Important"); - - Mockito.verify(printer).printCode("Code"); - Mockito.verify(printer).printNormal("Normal"); - Mockito.verify(printer).printEmph("Emph"); - Mockito.verify(printer).printSection("Section"); - Mockito.verify(printer).printImportant("Important"); - + try (Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, + (kb) -> 
Mockito.mock(Reasoner.class), printer, parserConfiguration)) { + interpreter.printCode("Code"); + interpreter.printNormal("Normal"); + interpreter.printEmph("Emph"); + interpreter.printSection("Section"); + interpreter.printImportant("Important"); + + Mockito.verify(printer).printCode("Code"); + Mockito.verify(printer).printNormal("Normal"); + Mockito.verify(printer).printEmph("Emph"); + Mockito.verify(printer).printSection("Section"); + Mockito.verify(printer).printImportant("Important"); + } } } From 9520d6049fb1b01c21d9dc733158dad18267dcd0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 11:07:48 +0200 Subject: [PATCH 1025/1255] Better help --- .../rulewerk/commands/ClearCommandInterpreter.java | 2 +- .../semanticweb/rulewerk/commands/LoadCommandInterpreter.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 421f1d5b2..f36c78dfb 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -33,7 +33,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio String task = Interpreter.extractNameArgument(command, 0, "task"); if (TASK_ALL.equals(task)) { interpreter.clearReasonerAndKnowledgeBase(); - interpreter.printNormal("Knowledge has been cleared; reasoner has been completely reset.\n"); + interpreter.printNormal("Knowledge base has been cleared; reasoner has been completely reset.\n"); } else if (TASK_INFERENCES.equals(task)) { interpreter.getReasoner().resetReasoner(); interpreter.printNormal("Reasoner has been reset.\n"); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index a5dd3196c..62878f8be 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -199,12 +199,12 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " [TASK] [RDF predicate]\n" // + interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate]\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // + " RDF to load an RDF document and convert it to facts\n" // - + " file: path to the file to load\n" // + + " \"file\": path to the file to load, enclosed in quotes\n" // + " RDF predicate: optional name of the predicate used for loading RDF\n" // + " triples (default: TRIPLE); use ABOX to load triples\n" // + " like OWL assertions, using unary and binary predicates\n"); From f1fac9a57b1bd672c85255de08d9d8f111fcb889 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 11:27:18 +0200 Subject: [PATCH 1026/1255] support clear FACTS and clear RULES --- .../commands/ClearCommandInterpreter.java | 14 ++++ .../commands/ClearCommandInterpreterTest.java | 82 +++++++++++++++++-- 2 files changed, 88 insertions(+), 8 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index f36c78dfb..6c4e3c17f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -21,11 +21,15 @@ */ import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Rule; public class ClearCommandInterpreter implements CommandInterpreter { static final String TASK_ALL = "ALL"; static final String TASK_INFERENCES = "INF"; + static final String TASK_FACTS = "FACTS"; + static final String TASK_RULES = "RULES"; @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { @@ -37,6 +41,16 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } else if (TASK_INFERENCES.equals(task)) { interpreter.getReasoner().resetReasoner(); interpreter.printNormal("Reasoner has been reset.\n"); + } else if (TASK_FACTS.equals(task)) { + for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + interpreter.getKnowledgeBase().removeStatement(fact); + } + interpreter.printNormal("All facts have been removed from the knowledge base.\n"); + } else if (TASK_RULES.equals(task)) { + for (Rule rule : interpreter.getKnowledgeBase().getRules()) { + interpreter.getKnowledgeBase().removeStatement(rule); + } + interpreter.printNormal("All rules have been removed from the knowledge base.\n"); } else { throw new CommandExecutionException( "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES); diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java index 3baf1df69..afbce0729 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -30,7 +30,9 @@ import 
org.mockito.Mockito; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -45,10 +47,17 @@ public class ClearCommandInterpreterTest { public void correctUseAll_succeeds() throws ParsingException, CommandExecutionException, IOException { StringWriter writer = new StringWriter(); Interpreter interpreter = Mockito.spy(InterpreterTest.getMockInterpreter(writer)); - Predicate predicate = Expressions.makePredicate("p", 1); - Term term = Expressions.makeAbstractConstant("a"); - Fact fact = Expressions.makeFact(predicate, term); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); assertEquals(1, interpreter.getKnowledgeBase().getFacts().size()); @@ -71,21 +80,78 @@ public void correctUseInf_succeeds() throws ParsingException, CommandExecutionEx Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, parserConfiguration)) { - Predicate predicate = Expressions.makePredicate("p", 1); - Term term = 
Expressions.makeAbstractConstant("a"); - Fact fact = Expressions.makeFact(predicate, term); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); Command command = interpreter.parseCommand("@clear INF ."); interpreter.runCommand(command); assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); - assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); Mockito.verify(reasoner).resetReasoner(); } } + @Test + public void correctUseFacts_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); + + Command 
command = interpreter.parseCommand("@clear FACTS ."); + interpreter.runCommand(command); + + assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); + assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + } + + @Test + public void correctUseRules_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); + + Command command = interpreter.parseCommand("@clear RULES ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + } + @Test(expected = CommandExecutionException.class) public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); @@ -103,7 +169,7 @@ public void wrongArgumentType_fails() throws ParsingException, CommandExecutionE Command command = interpreter.parseCommand("@clear \"string\" ."); interpreter.runCommand(command); } - + @Test(expected = CommandExecutionException.class) public void unkonwnTask_fails() throws 
ParsingException, CommandExecutionException { StringWriter writer = new StringWriter(); From 48739a9e5fd16e65c4e1bad3068dfd7e00525878 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 12:07:24 +0200 Subject: [PATCH 1027/1255] support @clear PREFIXES command --- .../commands/ClearCommandInterpreter.java | 17 ++- .../commands/ClearCommandInterpreterTest.java | 121 +++++++++++------- .../model/api/PrefixDeclarationRegistry.java | 7 +- .../AbstractPrefixDeclarationRegistry.java | 6 + .../MergingPrefixDeclarationRegistryTest.java | 8 ++ 5 files changed, 105 insertions(+), 54 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 6c4e3c17f..6644e3bed 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -30,6 +30,7 @@ public class ClearCommandInterpreter implements CommandInterpreter { static final String TASK_INFERENCES = "INF"; static final String TASK_FACTS = "FACTS"; static final String TASK_RULES = "RULES"; + static final String TASK_PREFIXES = "PREFIXES"; @Override public void run(Command command, Interpreter interpreter) throws CommandExecutionException { @@ -51,16 +52,24 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getKnowledgeBase().removeStatement(rule); } interpreter.printNormal("All rules have been removed from the knowledge base.\n"); + } else if (TASK_PREFIXES.equals(task)) { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().clear(); + interpreter.printNormal("All prefixes and the base namespace have been removed from the knowledge base.\n"); } else { - throw new CommandExecutionException( - "Task \"" + task + "\" not supported; should be one of: " + 
TASK_ALL + ", " + TASK_INFERENCES); + throw new CommandExecutionException("Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + + ", " + TASK_INFERENCES + ", " + TASK_FACTS + ", " + TASK_RULES + ", " + TASK_PREFIXES); } } @Override public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " TASK\n" + // - " TASK: what to reset, ALL (knowledge base), INF (inferences)\n"); + interpreter.printNormal("Usage: @" + commandName + " TASK\n" // + + " TASK: what to reset, possuble values:\n" // + + " ALL: empty knowledge base and completely reset reasoner\n" // + + " INF: reset reasoner to clear all loaded data and inferences\n" // + + " FACTS: remove all facts from knowledge base\n" // + + " RULES: remove all rules from knowledge base\n" // + + " PREFIXES: undeclare all prefixes and base namespace\n"); } @Override diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java index afbce0729..b9f2fe29d 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -22,16 +22,24 @@ import static org.junit.Assert.*; -import java.io.IOException; import java.io.StringWriter; import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import org.junit.Test; import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.Fact; import 
org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -43,21 +51,39 @@ public class ClearCommandInterpreterTest { + static Term a = Expressions.makeAbstractConstant("a"); + static Term x = Expressions.makeUniversalVariable("X"); + static Predicate p = Expressions.makePredicate("p", 1); + static Predicate q = Expressions.makePredicate("q", 1); + static Predicate r = Expressions.makePredicate("r", 1); + static Fact fact = Expressions.makeFact(p, a); + static PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + static PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + static Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + static Map standardPrefixes = new HashMap<>(); + static { + standardPrefixes.put("eg:", "http://example.org/"); + } + + private void prepareKnowledgeBase(KnowledgeBase knowledgeBase) throws PrefixDeclarationException { + knowledgeBase.addStatement(fact); + knowledgeBase.addStatement(rule); + knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + } + + private void assertPrefixesEqual(Map expectedPrefixes, + PrefixDeclarationRegistry prefixDeclarationRegistry) { + Set> prefixes = StreamSupport.stream(prefixDeclarationRegistry.spliterator(), false) + .collect(Collectors.toSet()); + assertEquals(expectedPrefixes.entrySet(), prefixes); + } + @Test - public void correctUseAll_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseAll_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); Interpreter interpreter = 
Mockito.spy(InterpreterTest.getMockInterpreter(writer)); - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); assertEquals(1, interpreter.getKnowledgeBase().getFacts().size()); @@ -67,11 +93,13 @@ public void correctUseAll_succeeds() throws ParsingException, CommandExecutionEx assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Mockito.verify(interpreter).clearReasonerAndKnowledgeBase(); } @Test - public void correctUseInf_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseInf_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); @@ -80,17 +108,7 @@ public void correctUseInf_succeeds() throws ParsingException, CommandExecutionEx Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, parserConfiguration)) { - Term a = 
Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear INF ."); interpreter.runCommand(command); @@ -98,25 +116,17 @@ public void correctUseInf_succeeds() throws ParsingException, CommandExecutionEx assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Mockito.verify(reasoner).resetReasoner(); } } @Test - public void correctUseFacts_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseFacts_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = 
Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear FACTS ."); interpreter.runCommand(command); @@ -124,24 +134,16 @@ public void correctUseFacts_succeeds() throws ParsingException, CommandExecution assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } @Test - public void correctUseRules_succeeds() throws ParsingException, CommandExecutionException, IOException { + public void correctUseRules_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { StringWriter writer = new StringWriter(); try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear RULES ."); interpreter.runCommand(command); @@ -149,6 +151,27 @@ public void correctUseRules_succeeds() throws ParsingException, 
CommandExecution assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); + } + } + + @Test + public void correctUsePrefixes_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(rule); + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + + Command command = interpreter.parseCommand("@clear PREFIXES ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 37d62280e..1532c706c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -44,13 +44,18 @@ public interface PrefixDeclarationRegistry extends Iterable(); + } + @Override public String getBaseIri() { if (baseIri == null) { diff --git 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index cc46e3035..157fbfded 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -105,6 +105,14 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce assertEquals(2, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } + @Test + public void clearPrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("another:", MORE_SPECIFIC); + prefixDeclarations.clear(); + assertEquals(0, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); + } + @Test public void setPrefixIri_setSamePrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefixIri("eg:", BASE); From 7d9858e07ea6f05a3d133e5f5f910f23c6e553b1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 12:16:40 +0200 Subject: [PATCH 1028/1255] support clear DATASOURCES --- .../commands/ClearCommandInterpreter.java | 16 ++++++-- .../commands/ClearCommandInterpreterTest.java | 37 ++++++++++++++++--- 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 6644e3bed..b27bda588 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -21,6 +21,7 @@ */ import org.semanticweb.rulewerk.core.model.api.Command; 
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Rule; @@ -30,6 +31,7 @@ public class ClearCommandInterpreter implements CommandInterpreter { static final String TASK_INFERENCES = "INF"; static final String TASK_FACTS = "FACTS"; static final String TASK_RULES = "RULES"; + static final String TASK_SOURCES = "DATASOURCES"; static final String TASK_PREFIXES = "PREFIXES"; @Override @@ -52,12 +54,19 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getKnowledgeBase().removeStatement(rule); } interpreter.printNormal("All rules have been removed from the knowledge base.\n"); + } else if (TASK_SOURCES.equals(task)) { + for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + .getDataSourceDeclarations()) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + } + interpreter.printNormal("All datasource declarations have been removed from the knowledge base.\n"); } else if (TASK_PREFIXES.equals(task)) { interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().clear(); interpreter.printNormal("All prefixes and the base namespace have been removed from the knowledge base.\n"); } else { - throw new CommandExecutionException("Task \"" + task + "\" not supported; should be one of: " + TASK_ALL - + ", " + TASK_INFERENCES + ", " + TASK_FACTS + ", " + TASK_RULES + ", " + TASK_PREFIXES); + throw new CommandExecutionException( + "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES + + ", " + TASK_FACTS + ", " + TASK_RULES + ", " + TASK_SOURCES + ", " + TASK_PREFIXES); } } @@ -69,12 +78,13 @@ public void printHelp(String commandName, Interpreter interpreter) { + " INF: reset reasoner to clear all loaded data and inferences\n" // + " FACTS: remove all facts from knowledge base\n" // + " RULES: remove all rules from knowledge 
base\n" // + + " DATASOURCES: remove all data source declarations from knowledge base\n" // + " PREFIXES: undeclare all prefixes and base namespace\n"); } @Override public String getSynopsis() { - return "discards the knowledge base and/or previously computed inferences"; + return "discards (parts of) the knowledge base or computed inferences"; } } diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java index b9f2fe29d..207cc68e1 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java @@ -36,12 +36,15 @@ import org.mockito.Mockito; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -64,10 +67,13 @@ public class ClearCommandInterpreterTest { static { standardPrefixes.put("eg:", "http://example.org/"); } + static DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(p, + Mockito.mock(DataSource.class)); 
private void prepareKnowledgeBase(KnowledgeBase knowledgeBase) throws PrefixDeclarationException { knowledgeBase.addStatement(fact); knowledgeBase.addStatement(rule); + knowledgeBase.addStatement(dataSourceDeclaration); knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); } @@ -115,7 +121,8 @@ public void correctUseInf_succeeds() assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); - assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); Mockito.verify(reasoner).resetReasoner(); } @@ -133,7 +140,8 @@ public void correctUseFacts_succeeds() assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); - assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } @@ -150,6 +158,24 @@ public void correctUseRules_succeeds() assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); + assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); + } + } + + @Test + public void correctUseSources_succeeds() + throws ParsingException, CommandExecutionException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + try 
(Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + prepareKnowledgeBase(interpreter.getKnowledgeBase()); + + Command command = interpreter.parseCommand("@clear DATASOURCES ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } @@ -161,16 +187,15 @@ public void correctUsePrefixes_succeeds() StringWriter writer = new StringWriter(); try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(rule); - interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + prepareKnowledgeBase(interpreter.getKnowledgeBase()); Command command = interpreter.parseCommand("@clear PREFIXES ."); interpreter.runCommand(command); assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules()); - assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + assertEquals(Arrays.asList(dataSourceDeclaration), + interpreter.getKnowledgeBase().getDataSourceDeclarations()); assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); } } From 03946ac2b1d91cc6a5e9735299af94f7ef51fd29 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 14:29:43 +0200 Subject: [PATCH 1029/1255] test read unparseable command --- .../rulewerk/client/shell/ShellTest.java | 75 +++++++++++-------- .../rulewerk/client/shell/ShellTestUtils.java | 32 ++++++++ .../commands/ExitCommandInterpreterTest.java | 30 ++------ 3 files changed, 80 
insertions(+), 57 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 0393a18a0..5c5b2fd72 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -20,23 +20,21 @@ * #L% */ - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; -import java.io.PrintWriter; +import java.io.StringWriter; import org.jline.reader.LineReader; -import org.jline.terminal.Terminal; import org.junit.Test; import org.mockito.Mockito; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; public class ShellTest { @@ -92,54 +90,67 @@ public void readCommand_Blank() { } @Test - public void readCommand_Invalid() { + public void readCommand_Unknown() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("invalid"); + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final 
Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("unknown"); final Command command = shell.readCommand(lineReaderMock, prompt); - assertNull(command); - // TODO test interpreter.parseCommand was called - // TODO test Parsing exception has been thrown + Mockito.verify(interpreterSpy).parseCommand("@unknown ."); + assertEquals("unknown", command.getName()); + assertTrue(command.getArguments().isEmpty()); + + // TODO test Parsing exception has not been thrown } @Test - public void readCommand_Exit() { + public void readCommand_ParsingException() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); final String prompt = "myPrompt"; - // TODO need real interpreter here - final Shell shell = new Shell(getMockInterpreter()); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); - final Command command = shell.readCommand(lineReaderMock, prompt); - assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("@"); - // TODO test Parsing exception has not been thrown - // TODO test ExitCommandInterpreter.run() has been called + final Command command = shell.readCommand(lineReaderMock, prompt); - assertFalse(shell.running); + Mockito.verify(interpreterSpy).parseCommand("@ ."); + assertNull(command); + + // TODO test Parsing exception has been thrown + assertTrue(stringWriter.toString().startsWith("Error: ")); } - static public Interpreter getMockInterpreter() { - final Terminal terminal = Mockito.mock(Terminal.class); - final PrintWriter printWriter = Mockito.mock(PrintWriter.class); - 
Mockito.when(terminal.writer()).thenReturn(printWriter); + @Test + public void readCommand_Exit() throws CommandExecutionException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final String prompt = "myPrompt"; + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreterMock = ShellTestUtils.getMockInterpreter(stringWriter); + final Shell shell = new Shell(interpreterMock); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); - final Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> { - Reasoner reasoner = Mockito.mock(Reasoner.class); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(kb); - return reasoner; - }, new TerminalStyledPrinter(terminal), parserConfiguration); + final Command command = shell.readCommand(lineReaderMock, prompt); + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + assertTrue(command.getArguments().isEmpty()); - return interpreter; + // TODO test Parsing exception has not been thrown + assertFalse(shell.running); } } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java new file mode 100644 index 000000000..8cb2f27e0 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -0,0 +1,32 @@ +package org.semanticweb.rulewerk.client.shell; + +import java.io.PrintWriter; +import java.io.Writer; + +import org.jline.terminal.Terminal; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; + +public final class 
ShellTestUtils { + + private ShellTestUtils() { + } + + public static Interpreter getMockInterpreter(final Writer writer) { + final Terminal terminalMock = Mockito.mock(Terminal.class); + final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminalMock); + final PrintWriter printWriter = new PrintWriter(writer); + Mockito.when(terminalMock.writer()).thenReturn(printWriter); + + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + final Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, terminalStyledPrinter, parserConfiguration); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index f6169e80b..33da1aa4c 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -25,19 +25,15 @@ import static org.junit.Assert.assertTrue; import java.io.StringWriter; -import java.io.Writer; import org.junit.Test; import org.mockito.Mockito; import org.semanticweb.rulewerk.client.shell.Shell; +import org.semanticweb.rulewerk.client.shell.ShellTestUtils; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.CommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.commands.SimpleStyledPrinter; import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -import 
org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; public class ExitCommandInterpreterTest { @@ -47,7 +43,7 @@ public void exitShell_succeeds() throws CommandExecutionException { final Interpreter interpreterMock = Mockito.mock(Interpreter.class); final Shell shell = new Shell(interpreterMock); final Shell shellSpy = Mockito.spy(shell); - final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellSpy); + final ExitCommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellSpy); commandInterpreter.run(Mockito.mock(Command.class), interpreterMock); @@ -57,9 +53,10 @@ public void exitShell_succeeds() throws CommandExecutionException { @Test public void help_succeeds() throws ParsingException, CommandExecutionException { final Shell shellMock = Mockito.mock(Shell.class); - final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final ExitCommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final StringWriter writer = new StringWriter(); - final Interpreter interpreter = getMockInterpreter(writer); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); final Interpreter interpreterSpy = Mockito.spy(interpreter); commandInterpreter.printHelp("commandname", interpreterSpy); @@ -68,23 +65,6 @@ public void help_succeeds() throws ParsingException, CommandExecutionException { final String result = writer.toString(); assertEquals("Usage: commandname.\n", result); - - // TODO what about testing printing to terminal? 
- // TODO establish test scope - } - -// static public Interpreter getMockTerminalInterpreter(final Terminal terminal) { -// final StyledPrinter printer = new TerminalStyledPrinter(terminal); -// final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); -// final Reasoner reasoner = Mockito.mock(Reasoner.class); -// return new Interpreter(reasoner, printer, parserConfiguration); -//} - - static public Interpreter getMockInterpreter(final Writer writer) { - final SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (kb) -> Mockito.mock(Reasoner.class), printer, - parserConfiguration); } @Test From af62a1d61c6efa6c15e4f49ee3f6d0989a6759a1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Aug 2020 15:08:55 +0200 Subject: [PATCH 1030/1255] test read command interrupt request --- .../commands/ExitCommandInterpreter.java | 2 +- .../rulewerk/client/shell/ShellTest.java | 108 ++++++++++++------ .../rulewerk/client/shell/ShellTestUtils.java | 30 +++++ 3 files changed, 104 insertions(+), 36 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 85f4edb6d..706275678 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -30,7 +30,7 @@ public class ExitCommandInterpreter implements CommandInterpreter { - public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>()); + public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>(0)); public 
static enum ExitCommandName { exit; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 5c5b2fd72..32004381b 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -21,16 +21,16 @@ */ import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.StringWriter; +import org.jline.reader.EndOfFileException; import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; import org.junit.Test; import org.mockito.Mockito; -import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.commands.CommandExecutionException; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; @@ -38,71 +38,71 @@ public class ShellTest { + private final String prompt = "myPrompt"; + @Test - public void processReadLine_Blank() { + public void processReadLine_blank() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" "); assertEquals("", processedReadLine); } @Test - public void processReadLine_StartsWithAt() { + public void processReadLine_startsWithAt() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" @ "); assertEquals("@ .", processedReadLine); } @Test - public void processReadLine_EndsWithStop() { + public void processReadLine_endsWithStop() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" . 
"); assertEquals("@.", processedReadLine); } @Test - public void processReadLine_StartsWithAtEndsWithStop() { + public void processReadLine_startsWithAtEndsWithStop() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" @. "); assertEquals("@.", processedReadLine); } @Test - public void processReadLine_DoesNotStartWithAt_DoesNotEndWithStop() { + public void processReadLine_doesNotStartWithAt_DoesNotEndWithStop() { final Shell shell = new Shell(Mockito.mock(Interpreter.class)); final String processedReadLine = shell.processReadLine(" .@ "); assertEquals("@.@ .", processedReadLine); } @Test - public void readCommand_Blank() { + public void readCommand_blank() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn(" "); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn(" "); - final Command command = shell.readCommand(lineReaderMock, prompt); + final Command command = shell.readCommand(lineReaderMock, this.prompt); assertNull(command); - // TODO test interpreter.parseCommand was not called + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); // TODO test exceptions have not been thrown } @Test - public void readCommand_Unknown() throws ParsingException { + public void readCommand_unknown() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final StringWriter stringWriter = new StringWriter(); final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); final Interpreter interpreterSpy = Mockito.spy(interpreter); final Shell shell = new 
Shell(interpreterSpy); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("unknown"); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("unknown"); - final Command command = shell.readCommand(lineReaderMock, prompt); + final Command command = shell.readCommand(lineReaderMock, this.prompt); Mockito.verify(interpreterSpy).parseCommand("@unknown ."); assertEquals("unknown", command.getName()); @@ -112,45 +112,83 @@ public void readCommand_Unknown() throws ParsingException { } @Test - public void readCommand_ParsingException() throws ParsingException { + public void readCommand_parsingException() throws ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final StringWriter stringWriter = new StringWriter(); final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); final Interpreter interpreterSpy = Mockito.spy(interpreter); final Shell shell = new Shell(interpreterSpy); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("@"); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("@"); - final Command command = shell.readCommand(lineReaderMock, prompt); + final Command command = shell.readCommand(lineReaderMock, this.prompt); Mockito.verify(interpreterSpy).parseCommand("@ ."); assertNull(command); - + // TODO test Parsing exception has been thrown - assertTrue(stringWriter.toString().startsWith("Error: ")); + assertTrue(stringWriter.toString().startsWith("Error: failed to parse command")); } @Test - public void readCommand_Exit() throws CommandExecutionException { + public void readCommand_exit() throws CommandExecutionException, ParsingException { final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final String prompt = "myPrompt"; - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreterMock = ShellTestUtils.getMockInterpreter(stringWriter); - final Shell shell = new Shell(interpreterMock); 
+ final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); - Mockito.when(lineReaderMock.readLine(prompt)).thenReturn("exit"); + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("exit"); - final Command command = shell.readCommand(lineReaderMock, prompt); - assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); - assertTrue(command.getArguments().isEmpty()); + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + Mockito.verify(interpreterSpy).parseCommand("@exit ."); // TODO test Parsing exception has not been thrown - assertFalse(shell.running); + } + + @Test + public void readCommand_interruptRequest_CTRLC_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException("")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLC_nonEmptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException(" ")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + 
public void readCommand_interruptRequest_CTRLD_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(EndOfFileException.class).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); } } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index 8cb2f27e0..189a83607 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -1,11 +1,36 @@ package org.semanticweb.rulewerk.client.shell; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.PrintWriter; import java.io.Writer; import org.jline.terminal.Terminal; import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; import org.semanticweb.rulewerk.parser.ParserConfiguration; @@ -29,4 +54,9 @@ public static Interpreter getMockInterpreter(final Writer writer) { }, terminalStyledPrinter, parserConfiguration); } + public static void testIsExitCommand(final Command command) { + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + assertTrue(command.getArguments().isEmpty()); + } + } From 74ea88e11857a8eee41d390cb89a9499dcb52372 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 27 Aug 2020 15:46:15 +0200 Subject: [PATCH 1031/1255] space after prompt --- .../semanticweb/rulewerk/client/shell/DefaultConfiguration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 76d8b7c3e..c1a026dc4 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -86,7 +86,7 @@ public static String buildPrompt(final Terminal terminal) { public static AttributedString buildPromptProvider() { final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); - return new AttributedString("rulewerk>", promptStyle); + return new AttributedString("rulewerk> ", promptStyle); } } From 07607529b6a04a7774314353c961cc67bc6e05c9 Mon Sep 17 00:00:00 2001 From: 
Irina Dragoste Date: Thu, 27 Aug 2020 17:22:04 +0200 Subject: [PATCH 1032/1255] modify gitignore delete test output files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8454baf10..b052b97ca 100644 --- a/.gitignore +++ b/.gitignore @@ -53,5 +53,6 @@ rulewerk-core/src/test/data/output/* rulewerk-examples/src/main/data/output/* rulewerk-examples/src/main/data/logs/* rulewerk-rdf/src/main/data/output/* +rulewerk-vlog/src/test/data/output/* /build-vlog/vlog/ /TAGS From e0e1710a07e4f4502ca5be25d1863ca5a2689fc8 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 12:17:51 +0200 Subject: [PATCH 1033/1255] unit test run commands --- .../rulewerk/client/shell/Shell.java | 40 +- .../rulewerk/client/shell/ShellTest.java | 520 +++++++++++------- 2 files changed, 350 insertions(+), 210 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 7f56b3f22..e4c82fc63 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -36,7 +36,7 @@ public class Shell { private final Interpreter interpreter; - boolean running; + private boolean running; public Shell(final Interpreter interpreter) { this.interpreter = interpreter; @@ -56,24 +56,28 @@ public void run(final LineReader lineReader, final String prompt) { this.running = true; while (this.running) { - final Command command; - try { - command = this.readCommand(lineReader, prompt); - } catch (final Exception e) { - this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); - e.printStackTrace(); - continue; - } + this.runCommand(lineReader, prompt); + } + this.interpreter.printSection("Exiting Rulewerk shell ... 
bye.\n\n"); + } + + Command runCommand(final LineReader lineReader, final String prompt) { + Command command = null; + try { + command = this.readCommand(lineReader, prompt); + } catch (final Exception e) { + this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); + e.printStackTrace(); + } - if (command != null) { - try { - this.interpreter.runCommand(command); - } catch (final CommandExecutionException e) { - this.interpreter.printNormal("Error: " + e.getMessage() + "\n"); - } + if (command != null) { + try { + this.interpreter.runCommand(command); + } catch (final CommandExecutionException e) { + this.interpreter.printNormal("Error: " + e.getMessage() + "\n"); } } - this.interpreter.printSection("Exiting Rulewerk shell ... bye.\n\n"); + return command; } /** @@ -142,4 +146,8 @@ private void printWelcome() { this.interpreter.printNormal("\n"); } + boolean isRunning() { + return this.running; + } + } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index 32004381b..d5f1d3e18 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -1,194 +1,326 @@ -package org.semanticweb.rulewerk.client.shell; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -import java.io.StringWriter; - -import org.jline.reader.EndOfFileException; -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; -import org.junit.Test; -import org.mockito.Mockito; -import org.semanticweb.rulewerk.commands.CommandExecutionException; -import org.semanticweb.rulewerk.commands.Interpreter; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class ShellTest { - - private final String prompt = "myPrompt"; - - @Test - public void processReadLine_blank() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" "); - assertEquals("", processedReadLine); - } - - @Test - public void processReadLine_startsWithAt() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @ "); - assertEquals("@ .", processedReadLine); - } - - @Test - public void processReadLine_endsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" . "); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_startsWithAtEndsWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" @. 
"); - assertEquals("@.", processedReadLine); - } - - @Test - public void processReadLine_doesNotStartWithAt_DoesNotEndWithStop() { - final Shell shell = new Shell(Mockito.mock(Interpreter.class)); - final String processedReadLine = shell.processReadLine(" .@ "); - assertEquals("@.@ .", processedReadLine); - } - - @Test - public void readCommand_blank() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final Shell shell = new Shell(interpreterMock); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn(" "); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - assertNull(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - // TODO test exceptions have not been thrown - } - - @Test - public void readCommand_unknown() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); - final Interpreter interpreterSpy = Mockito.spy(interpreter); - final Shell shell = new Shell(interpreterSpy); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("unknown"); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - - Mockito.verify(interpreterSpy).parseCommand("@unknown ."); - assertEquals("unknown", command.getName()); - assertTrue(command.getArguments().isEmpty()); - - // TODO test Parsing exception has not been thrown - } - - @Test - public void readCommand_parsingException() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); - final Interpreter interpreterSpy = Mockito.spy(interpreter); - 
final Shell shell = new Shell(interpreterSpy); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("@"); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - - Mockito.verify(interpreterSpy).parseCommand("@ ."); - assertNull(command); - - // TODO test Parsing exception has been thrown - assertTrue(stringWriter.toString().startsWith("Error: failed to parse command")); - } - - @Test - public void readCommand_exit() throws CommandExecutionException, ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - - final StringWriter stringWriter = new StringWriter(); - final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); - final Interpreter interpreterSpy = Mockito.spy(interpreter); - final Shell shell = new Shell(interpreterSpy); - - Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("exit"); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - ShellTestUtils.testIsExitCommand(command); - Mockito.verify(interpreterSpy).parseCommand("@exit ."); - - // TODO test Parsing exception has not been thrown - } - - @Test - public void readCommand_interruptRequest_CTRLC_emptyPartialLine() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final Shell shell = new Shell(interpreterMock); - - Mockito.doThrow(new UserInterruptException("")).when(lineReaderMock).readLine(this.prompt); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - ShellTestUtils.testIsExitCommand(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - } - - @Test - public void readCommand_interruptRequest_CTRLC_nonEmptyPartialLine() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final 
Shell shell = new Shell(interpreterMock); - - Mockito.doThrow(new UserInterruptException(" ")).when(lineReaderMock).readLine(this.prompt); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - assertNull(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - } - - @Test - public void readCommand_interruptRequest_CTRLD_emptyPartialLine() throws ParsingException { - final LineReader lineReaderMock = Mockito.mock(LineReader.class); - final Interpreter interpreterMock = Mockito.mock(Interpreter.class); - final Shell shell = new Shell(interpreterMock); - - Mockito.doThrow(EndOfFileException.class).when(lineReaderMock).readLine(this.prompt); - - final Command command = shell.readCommand(lineReaderMock, this.prompt); - ShellTestUtils.testIsExitCommand(command); - - Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); - } - -} +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.StringWriter; +import java.io.Writer; + +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ShellTest { + + private final String prompt = "myPrompt"; + + @Test + public void processReadLine_blank() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" "); + assertEquals("", processedReadLine); + } + + @Test + public void processReadLine_startsWithAt() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @ "); + assertEquals("@ .", processedReadLine); + } + + @Test + public void processReadLine_endsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" . "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_startsWithAtEndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @. 
"); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_doesNotStartWithAt_DoesNotEndWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" .@ "); + assertEquals("@.@ .", processedReadLine); + } + + @Test + public void readCommand_blank() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn(" "); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + // TODO test exceptions have not been thrown + } + + @Test + public void readCommand_unknown() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("unknown"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + + Mockito.verify(interpreterSpy).parseCommand("@unknown ."); + assertEquals("unknown", command.getName()); + assertTrue(command.getArguments().isEmpty()); + + // TODO test Parsing exception has not been thrown + } + + @Test + public void readCommand_parsingException() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + 
final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("@"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + + Mockito.verify(interpreterSpy).parseCommand("@ ."); + assertNull(command); + + // TODO test Parsing exception has been thrown + assertTrue(stringWriter.toString().startsWith("Error: failed to parse command")); + } + + @Test + public void readCommand_exit() throws CommandExecutionException, ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("exit"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + Mockito.verify(interpreterSpy).parseCommand("@exit ."); + + // TODO test Parsing exception has not been thrown + } + + @Test + public void readCommand_interruptRequest_CTRLC_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException("")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLC_nonEmptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final 
Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException(" ")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLD_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(EndOfFileException.class).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void run_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertEquals(7, lines.length); + } + + @Test + public void run_empty_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new 
Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("", "exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertEquals(7, lines.length); + } + + @Test + public void run_help_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("help", "exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy, Mockito.times(2)).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertTrue(lines.length > 7); + } + + @Test + public void runCommand_unknown() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("unknown", "exit"); + + final Command command = shell.runCommand(lineReader, this.prompt); + assertNotNull(command); + assertEquals("unknown", command.getName()); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + final String 
printedResult = writer.toString(); + assertTrue(printedResult.startsWith("Error: ")); + } + + @Test + public void runCommand_exceptionDuringReading() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + final RuntimeException runtimeException = new RuntimeException("test"); + final RuntimeException runtimeExceptionSpy = Mockito.spy(runtimeException); + + Mockito.when(lineReader.readLine(this.prompt)).thenThrow(runtimeExceptionSpy); + + final Command command = shell.runCommand(lineReader, this.prompt); + assertNull(command); + + Mockito.verify(interpreterSpy, Mockito.never()).runCommand(Mockito.any(Command.class)); + + final String printedResult = writer.toString(); + assertTrue(printedResult.startsWith("Unexpected error: " + runtimeException.getMessage())); + + Mockito.verify(runtimeExceptionSpy).printStackTrace(); + } + + public void testPrintWelcome(final Interpreter interpreterSpy) { + Mockito.verify(interpreterSpy, Mockito.times(2)).printNormal("\n"); + Mockito.verify(interpreterSpy).printSection("Welcome to the Rulewerk interactive shell.\n"); + Mockito.verify(interpreterSpy).printNormal("For further information, type "); + Mockito.verify(interpreterSpy).printCode("@help."); + Mockito.verify(interpreterSpy).printNormal(" To quit, type "); + Mockito.verify(interpreterSpy).printCode("@exit.\n"); + } + + public void testPrintExit(final Interpreter interpreterSpy) { + Mockito.verify(interpreterSpy).printSection("Exiting Rulewerk shell ... 
bye.\n\n"); + } + +} From 0691d9b014fa8f49759406f634240d35548cd2c4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 13:14:25 +0200 Subject: [PATCH 1034/1255] test terminal styled printer --- .../shell/TerminalStylePrinterTest.java | 63 +++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java new file mode 100644 index 000000000..beec791ab --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java @@ -0,0 +1,63 @@ +package org.semanticweb.rulewerk.client.shell; + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.junit.Test; +import org.mockito.Mockito; + +public class TerminalStylePrinterTest { + final Terminal terminal; + final PrintWriter writer; + final TerminalStyledPrinter terminalStyledPrinter; + + public static final String TEST_STRING = "test"; + + public TerminalStylePrinterTest() { + this.writer = Mockito.mock(PrintWriter.class); + this.terminal = Mockito.mock(Terminal.class); + Mockito.when(this.terminal.writer()).thenReturn(this.writer); + + this.terminalStyledPrinter = new TerminalStyledPrinter(this.terminal); + + } + + @Test + public void testPrintNormal() { + this.terminalStyledPrinter.printNormal(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT); + } + + @Test + public void testPrintSection() { + this.terminalStyledPrinter.printSection(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.bold()); + } + + @Test + public void testPrintEmph() { + this.terminalStyledPrinter.printEmph(TEST_STRING); + 
this.testPrintStyledExpected(AttributedStyle.DEFAULT.bold()); + } + + @Test + public void testPrintCode() { + this.terminalStyledPrinter.printCode(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + } + + @Test + public void testPrintImportant() { + this.terminalStyledPrinter.printImportant(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + } + + private void testPrintStyledExpected(final AttributedStyle expectedStyle) { + final AttributedString expectedAttributedString = new AttributedString(TEST_STRING, expectedStyle); + Mockito.verify(this.writer).print(expectedAttributedString.toAnsi(this.terminal)); + Mockito.verify(this.writer).flush(); + } + +} From cbb83136e8dfb3c2c13fc5cc8aa49a12b91d12bd Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 14:27:45 +0200 Subject: [PATCH 1035/1255] interactive shell initialize interpreter unit test --- .../client/shell/DefaultConfiguration.java | 10 +++-- .../shell/DefaultConfigurationTest.java | 42 +++++++++++++++++++ .../client/shell/InteractiveShellTest.java | 31 ++++++++++++++ 3 files changed, 80 insertions(+), 3 deletions(-) create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java create mode 100644 rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index c1a026dc4..409da872a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -77,14 +77,18 @@ private static Completer buildCompleter(final Interpreter interpreter) { } public static 
Terminal buildTerminal() throws IOException { - return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true).build(); + return getDefaultTerminalConfiguration().build(); + } + + static TerminalBuilder getDefaultTerminalConfiguration() { + return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true); } public static String buildPrompt(final Terminal terminal) { - return buildPromptProvider().toAnsi(terminal); + return getDefaultPromptStyle().toAnsi(terminal); } - public static AttributedString buildPromptProvider() { + static AttributedString getDefaultPromptStyle() { final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); return new AttributedString("rulewerk> ", promptStyle); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java new file mode 100644 index 000000000..422456688 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java @@ -0,0 +1,42 @@ +package org.semanticweb.rulewerk.client.shell; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.jline.terminal.Terminal; +import org.jline.terminal.TerminalBuilder; +import org.jline.utils.AttributedString; +import org.junit.Test; +import org.mockito.Mockito; + +public class DefaultConfigurationTest { + + @Test + public void buildPromptProvider() { + final AttributedString promptProvider = DefaultConfiguration.getDefaultPromptStyle(); + assertEquals("rulewerk> ", promptProvider.toString()); + } + + @Test + public void buildPrompt() { + final Terminal terminal = Mockito.mock(Terminal.class); + Mockito.when(terminal.getType()).thenReturn(Terminal.TYPE_DUMB); + final String string = DefaultConfiguration.buildPrompt(terminal); + 
assertTrue(string.length() >= 10); + } + + public void buildTerminal() throws IOException { + final TerminalBuilder terminalBuilderMock = Mockito.mock(TerminalBuilder.class); + Mockito.when(TerminalBuilder.builder()).thenReturn(terminalBuilderMock); + + Mockito.verify(terminalBuilderMock.dumb(true)); + Mockito.verify(terminalBuilderMock.jansi(true)); + Mockito.verify(terminalBuilderMock.jna(false)); + Mockito.verify(terminalBuilderMock.system(true)); + Mockito.verify(terminalBuilderMock.build()); + + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java new file mode 100644 index 000000000..310333f30 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java @@ -0,0 +1,31 @@ +package org.semanticweb.rulewerk.client.shell; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; + +public class InteractiveShellTest { + + @Test + public void initializeInterpreter() { + final Terminal terminal = Mockito.mock(Terminal.class); + final PrintWriter writer = Mockito.mock(PrintWriter.class); + Mockito.when(terminal.writer()).thenReturn(writer); + + final InteractiveShell interactiveShell = new InteractiveShell(); + final Interpreter interpreter = interactiveShell.initializeInterpreter(terminal); + + assertTrue(interpreter.getParserConfiguration() instanceof DefaultParserConfiguration); + assertTrue(interpreter.getKnowledgeBase().getStatements().isEmpty()); + assertEquals(writer, interpreter.getWriter()); + } + + +} From 90aec041e38bb6d714c50ae2127b266e0d564233 Mon Sep 17 00:00:00 2001 From: 
Irina Dragoste Date: Fri, 28 Aug 2020 14:38:04 +0200 Subject: [PATCH 1036/1255] completer help commands --- .../client/shell/DefaultConfiguration.java | 4 ++++ .../shell/DefaultConfigurationTest.java | 20 +++++++++++++++++++ .../client/shell/InteractiveShellTest.java | 20 +++++++++++++++++++ .../shell/TerminalStylePrinterTest.java | 20 +++++++++++++++++++ 4 files changed, 64 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 409da872a..29a43e4fb 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -32,6 +32,7 @@ import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; +import org.jline.reader.impl.completer.StringsCompleter; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; @@ -68,6 +69,9 @@ private static Completer buildCompleter(final Interpreter interpreter) { registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); + } else if (serializedCommandName.equals("@help")) { + nodes.add(TreeCompleter.node(serializedCommandName, + TreeCompleter.node(new StringsCompleter(registeredCommandNames)))); } else { nodes.add(TreeCompleter.node(serializedCommandName)); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java index 422456688..2c19f1f06 100644 --- 
a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java index 310333f30..2c9bfb90a 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java index beec791ab..0f0ff0253 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.PrintWriter; import org.jline.terminal.Terminal; From f09fa7848783b0d9deb1946953851176573a110a Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Aug 2020 15:39:12 +0200 Subject: [PATCH 1037/1255] clean code lineReader builder --- .../client/shell/DefaultConfiguration.java | 22 ++++++++----------- .../shell/DefaultConfigurationTest.java | 15 ------------- .../client/shell/InteractiveShellTest.java | 1 - 3 files changed, 9 insertions(+), 29 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java index 29a43e4fb..08b7242e5 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java @@ -28,7 +28,6 @@ import org.jline.builtins.Completers; import org.jline.builtins.Completers.FileNameCompleter; import org.jline.builtins.Completers.TreeCompleter; -import org.jline.builtins.Completers.TreeCompleter.Node; import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; @@ -46,18 +45,16 @@ private DefaultConfiguration() { public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) - .appName("Rulewerk Shell").completer(buildCompleter(interpreter)) - // .expander(expander()) - // .history(buildHistory()) - // .highlighter(buildHighlighter()) - ; + .appName("Rulewerk Shell"); + /* + * This allows completion on an empty buffer, rather than inserting a tab + */ + lineReaderBuilder.option(LineReader.Option.INSERT_TAB, false); + lineReaderBuilder.option(LineReader.Option.AUTO_FRESH_LINE, true); - final LineReader lineReader = lineReaderBuilder.build(); + 
lineReaderBuilder.completer(buildCompleter(interpreter)); - lineReader.unsetOpt(LineReader.Option.INSERT_TAB); // This allows completion on an empty buffer, rather than - // inserting a tab - lineReader.setOpt(LineReader.Option.AUTO_FRESH_LINE); - return lineReader; + return lineReaderBuilder.build(); } private static Completer buildCompleter(final Interpreter interpreter) { @@ -65,7 +62,7 @@ private static Completer buildCompleter(final Interpreter interpreter) { final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); final Set registeredCommandNames = interpreter.getRegisteredCommands(); - final List nodes = new ArrayList<>(); + final List nodes = new ArrayList<>(); registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); @@ -77,7 +74,6 @@ private static Completer buildCompleter(final Interpreter interpreter) { } }); return new TreeCompleter(nodes); - } public static Terminal buildTerminal() throws IOException { diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java index 2c19f1f06..81ca2e0e1 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java @@ -23,10 +23,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import java.io.IOException; - import org.jline.terminal.Terminal; -import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; import org.junit.Test; import org.mockito.Mockito; @@ -47,16 +44,4 @@ public void buildPrompt() { assertTrue(string.length() >= 10); } - public void 
buildTerminal() throws IOException { - final TerminalBuilder terminalBuilderMock = Mockito.mock(TerminalBuilder.class); - Mockito.when(TerminalBuilder.builder()).thenReturn(terminalBuilderMock); - - Mockito.verify(terminalBuilderMock.dumb(true)); - Mockito.verify(terminalBuilderMock.jansi(true)); - Mockito.verify(terminalBuilderMock.jna(false)); - Mockito.verify(terminalBuilderMock.system(true)); - Mockito.verify(terminalBuilderMock.build()); - - } - } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java index 2c9bfb90a..566ead3a1 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java @@ -47,5 +47,4 @@ public void initializeInterpreter() { assertEquals(writer, interpreter.getWriter()); } - } From 76b1359e283006cc32698a47e1e6003b7fee653b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 15:58:26 +0200 Subject: [PATCH 1038/1255] new caching TermFactory --- .../model/implementation/TermFactory.java | 172 ++++++++++++++++++ .../implementation/Skolemization.java | 20 +- .../{ => implementation}/ArgumentTest.java | 2 +- .../ConjunctionImplTest.java | 2 +- .../DataSourceDeclarationTest.java | 2 +- .../model/{ => implementation}/FactTest.java | 2 +- .../MergingPrefixDeclarationRegistryTest.java | 2 +- .../NegativeLiteralImplTest.java | 2 +- .../PositiveLiteralImplTest.java | 2 +- .../PredicateImplTest.java | 2 +- .../{ => implementation}/RuleImplTest.java | 2 +- .../{ => implementation}/SerializerTest.java | 2 +- .../model/implementation/TermFactoryTest.java | 100 ++++++++++ .../{ => implementation}/TermImplTest.java | 2 +- .../implementation/SkolemizationTest.java | 14 +- 15 files changed, 304 insertions(+), 24 deletions(-) create mode 100644 
rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/ArgumentTest.java (98%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/ConjunctionImplTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/DataSourceDeclarationTest.java (98%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/FactTest.java (97%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/MergingPrefixDeclarationRegistryTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/NegativeLiteralImplTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/PositiveLiteralImplTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/PredicateImplTest.java (97%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/RuleImplTest.java (99%) rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/SerializerTest.java (99%) create mode 100644 rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java rename rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/{ => implementation}/TermImplTest.java (99%) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java new file mode 100644 index 000000000..c32c0bb82 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java @@ -0,0 +1,172 @@ +package 
org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; + +/** + * Class for creating various kinds of terms. Instances of this class maintain + * an internal cache that allows them to re-use the generated objects, which is + * useful to safe memory since the same term is often needed in multiple places. + * + * @author Markus Kroetzsch + * + */ +public class TermFactory { + + /** + * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used + * here for mapping VLog ids to terms. 
+ * + * @author Markus Kroetzsch + * + * @param + * @param + */ + static class SimpleLruMap extends LinkedHashMap { + private static final long serialVersionUID = 7151535464938775359L; + private int maxCapacity; + + public SimpleLruMap(int initialCapacity, int maxCapacity) { + super(initialCapacity, 0.75f, true); + this.maxCapacity = maxCapacity; + } + + @Override + protected boolean removeEldestEntry(Map.Entry eldest) { + return size() >= this.maxCapacity; + } + } + + final private SimpleLruMap abstractConstants; + final private SimpleLruMap existentialVariables; + final private SimpleLruMap universalVariables; + final private SimpleLruMap predicates; + + public TermFactory() { + this(65536); + } + + public TermFactory(int cacheSize) { + abstractConstants = new SimpleLruMap<>(256, cacheSize); + existentialVariables = new SimpleLruMap<>(64, 1024); + universalVariables = new SimpleLruMap<>(64, 1024); + predicates = new SimpleLruMap<>(256, 4096); + } + + /** + * Creates a {@link UniversalVariable}. + * + * @param name name of the variable + * @return a {@link UniversalVariable} corresponding to the input. + */ + public UniversalVariable makeUniversalVariable(String name) { + if (universalVariables.containsKey(name)) { + return universalVariables.get(name); + } else { + UniversalVariable result = new UniversalVariableImpl(name); + universalVariables.put(name, result); + return result; + } + } + + /** + * Creates an {@link ExistentialVariable}. + * + * @param name name of the variable + * @return a {@link ExistentialVariable} corresponding to the input. + */ + public ExistentialVariable makeExistentialVariable(String name) { + if (existentialVariables.containsKey(name)) { + return existentialVariables.get(name); + } else { + ExistentialVariable result = new ExistentialVariableImpl(name); + existentialVariables.put(name, result); + return result; + } + } + + /** + * Creates an {@link AbstractConstant}. 
+ * + * @param name name of the constant + * @return an {@link AbstractConstant} corresponding to the input. + */ + public AbstractConstant makeAbstractConstant(String name) { + if (abstractConstants.containsKey(name)) { + return abstractConstants.get(name); + } else { + AbstractConstant result = new AbstractConstantImpl(name); + abstractConstants.put(name, result); + return result; + } + } + + /** + * Creates a {@link DatatypeConstant} from the given input. + * + * @param lexicalValue the lexical representation of the data value + * @param datatypeIri the full absolute IRI of the datatype of this literal + * @return a {@link DatatypeConstant} corresponding to the input. + */ + public DatatypeConstant makeDatatypeConstant(String lexicalValue, String datatypeIri) { + return new DatatypeConstantImpl(lexicalValue, datatypeIri); + } + + /** + * Creates a {@link LanguageStringConstant} from the given input. + * + * @param string the string value of the constant + * @param languageTag the BCP 47 language tag of the constant; should be in + * lower case + * @return a {@link LanguageStringConstant} corresponding to the input. + */ + public LanguageStringConstant makeLanguageStringConstant(String string, String languageTag) { + return new LanguageStringConstantImpl(string, languageTag); + } + + /** + * Creates a {@link Predicate}. + * + * @param name non-blank predicate name + * @param arity predicate arity, strictly greater than 0 + * @return a {@link Predicate} corresponding to the input. 
+ */ + public Predicate makePredicate(String name, int arity) { + String key = name + "#" + String.valueOf(arity); + if (predicates.containsKey(key)) { + return predicates.get(key); + } else { + Predicate result = new PredicateImpl(name, arity); + predicates.put(key, result); + return result; + } + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index e51a6b5d7..54080781b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -25,8 +25,8 @@ import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; /** * A class that implements skolemization and collision-free renaming of named @@ -73,13 +73,15 @@ public RenamedNamedNull getRenamedNamedNull(String name) { /** * Creates a skolem constant that is determined by the given original name. * - * @param name the name of the {@link NamedNull} to skolemize (or any other - * string for which to create a unique renaming) + * @param name the name of the {@link NamedNull} to skolemize (or any + * other string for which to create a unique renaming) + * @param termFactory the {@link TermFactory} that is used to create the + * constant * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code name}. 
*/ - public AbstractConstant getSkolemConstant(String name) { - return new AbstractConstantImpl(getSkolemConstantName(name)); + public AbstractConstant getSkolemConstant(String name, TermFactory termFactory) { + return termFactory.makeAbstractConstant(getSkolemConstantName(name)); } /** @@ -87,12 +89,14 @@ public AbstractConstant getSkolemConstant(String name) { * The method ensures that a new unique name is generated unless the given * object is already a {@link RenamedNamedNull}. * - * @param namedNull the {@link NamedNull} to skolemize + * @param namedNull the {@link NamedNull} to skolemize + * @param termFactory the {@link TermFactory} that is used to create the + * constant * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code namedNull}. */ - public AbstractConstant getSkolemConstant(NamedNull namedNull) { - return new AbstractConstantImpl(getSkolemConstantName(namedNull)); + public AbstractConstant getSkolemConstant(NamedNull namedNull, TermFactory termFactory) { + return termFactory.makeAbstractConstant(getSkolemConstantName(namedNull)); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java index 76efe55d4..66ae2f550 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ArgumentTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java index 322bbda3f..9acc89f28 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java similarity index 98% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java index ef5c89e30..9df9cd3e0 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java index 7539d60c9..1aa8017de 100644 
--- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java index 157fbfded..6ea303c2c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java index 34a7e9fc9..475f1ab8b 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java index d4a93e489..c0613996c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java similarity index 97% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java index d8274db91..f77d4f24f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java similarity index 99% rename from 
rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java index 5fc0ee6e6..6d234958c 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java index f495de508..977160aeb 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/SerializerTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java new file mode 100644 index 000000000..388289c6d --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java @@ -0,0 +1,100 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import 
org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; + +public class TermFactoryTest { + + @Test + public void universalVariable_reused() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeUniversalVariable("X"); + Term term2 = termFactory.makeUniversalVariable("Y"); + Term term3 = termFactory.makeUniversalVariable("X"); + Term term4 = new UniversalVariableImpl("X"); + + assertNotEquals(term1, term2); + assertTrue(term1 == term3); + assertEquals(term1, term4); + } + + @Test + public void existentialVariable_reused() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeExistentialVariable("X"); + Term term2 = termFactory.makeExistentialVariable("Y"); + Term term3 = termFactory.makeExistentialVariable("X"); + Term term4 = new ExistentialVariableImpl("X"); + + assertNotEquals(term1, term2); + assertTrue(term1 == term3); + assertEquals(term1, term4); + } + + @Test + public void abstractConstant_reused() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeAbstractConstant("X"); + Term term2 = termFactory.makeAbstractConstant("Y"); + Term term3 = termFactory.makeAbstractConstant("X"); + Term term4 = new AbstractConstantImpl("X"); + + assertNotEquals(term1, term2); + assertTrue(term1 == term3); + assertEquals(term1, term4); + } + + @Test + public void predicate_reused() { + TermFactory termFactory = new TermFactory(); + Predicate pred1 = termFactory.makePredicate("p", 1); + Predicate pred2 = termFactory.makePredicate("q", 1); + Predicate pred3 = 
termFactory.makePredicate("p", 2); + Predicate pred4 = termFactory.makePredicate("p", 1); + + assertNotEquals(pred1, pred2); + assertNotEquals(pred1, pred3); + assertTrue(pred1 == pred4); + } + + @Test + public void datatypeConstant_succeeds() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeDatatypeConstant("abc", "http://test"); + Term term2 = new DatatypeConstantImpl("abc", "http://test"); + + assertEquals(term1, term2); + } + + @Test + public void languageConstant_succeeds() { + TermFactory termFactory = new TermFactory(); + Term term1 = termFactory.makeLanguageStringConstant("abc", "de"); + Term term2 = new LanguageStringConstantImpl("abc", "de"); + + assertEquals(term1, term2); + } + + @Test + public void lruCache_works() { + TermFactory.SimpleLruMap map = new TermFactory.SimpleLruMap<>(1, 3); + map.put("a", "test"); + map.put("b", "test"); + map.put("c", "test"); + map.put("c", "test2"); + + assertTrue(map.containsKey("b")); + assertTrue(map.containsKey("c")); + assertFalse(map.containsKey("a")); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java similarity index 99% rename from rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java index 2420dc479..c8230303d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.rulewerk.core.model; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index a382aa220..d87cd495f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -27,6 +27,7 @@ import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; public class SkolemizationTest { private Skolemization skolemization; @@ -76,23 +77,26 @@ public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentN @Test public void skolemConstant_succeeds() { - AbstractConstant skolem = skolemization.getSkolemConstant(name1); + TermFactory termFactory = new TermFactory(); + AbstractConstant skolem = skolemization.getSkolemConstant(name1, termFactory); assertTrue(skolem.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX)); } @Test public void skolemConstantFromNamedNull_succeeds() { + TermFactory termFactory = new TermFactory(); NamedNull null1 = new NamedNullImpl(name1); - AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); - AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1, termFactory); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1, termFactory); assertEquals(skolem2, skolem1); } @Test public void skolemConstantFromRenamedNamedNull_succeeds() { + TermFactory termFactory = new TermFactory(); NamedNull null1 = skolemization.getRenamedNamedNull(name1); - AbstractConstant skolem1 = skolemization.getSkolemConstant(null1); - AbstractConstant skolem2 = skolemization.getSkolemConstant(name1); + AbstractConstant skolem1 = 
skolemization.getSkolemConstant(null1, termFactory); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1, termFactory); assertEquals(skolem2, skolem1); } } From c1b229807b2a7966db3df6202f1b0849fd86ecdf Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 15:59:33 +0200 Subject: [PATCH 1039/1255] use TermFactory --- .../rulewerk/rdf/RdfValueToTermConverter.java | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index de70adf90..c152e19c5 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -31,10 +31,7 @@ import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; -import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; /** * Helper class to convert RDF ters to Rulewerk {@link Term} objects. @@ -46,6 +43,7 @@ final class RdfValueToTermConverter { final boolean skolemize; final Skolemization skolemization = new Skolemization(); + final TermFactory termFactory = new TermFactory(); /** * Constructor. @@ -75,7 +73,7 @@ public Term convertBlankNode(final BNode bNode) { // redundant. But we want a RenamedNamedNull here, and a consistent name format // is nice too. 
if (skolemize) { - return skolemization.getSkolemConstant(bNode.getID()); + return skolemization.getSkolemConstant(bNode.getID(), termFactory); } else { return skolemization.getRenamedNamedNull(bNode.getID()); } @@ -83,24 +81,24 @@ public Term convertBlankNode(final BNode bNode) { public Term convertUri(final URI uri) { final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); - return new AbstractConstantImpl(escapedURIString); + return termFactory.makeAbstractConstant(escapedURIString); } public Term convertLiteral(final Literal literal) { final URI datatype = literal.getDatatype(); if (datatype != null) { - return new DatatypeConstantImpl(XMLDatatypeUtil.normalize(literal.getLabel(), datatype), + return termFactory.makeDatatypeConstant(XMLDatatypeUtil.normalize(literal.getLabel(), datatype), datatype.toString()); } else if (literal.getLanguage() != null) { - return new LanguageStringConstantImpl(literal.getLabel(), literal.getLanguage()); + return termFactory.makeLanguageStringConstant(literal.getLabel(), literal.getLanguage()); } else { - return new DatatypeConstantImpl(literal.getLabel(), PrefixDeclarationRegistry.XSD_STRING); + return termFactory.makeDatatypeConstant(literal.getLabel(), PrefixDeclarationRegistry.XSD_STRING); } } public Predicate convertUriToPredicate(final URI uri, int arity) { final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); - return new PredicateImpl(escapedURIString, arity); + return termFactory.makePredicate(escapedURIString, arity); } } From 99e12dedbae5b078b8d791d3c72add72a5d77482 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 16:14:52 +0200 Subject: [PATCH 1040/1255] use TermFactory in parser --- .../rulewerk/parser/ParserConfiguration.java | 8 +++-- .../rulewerk/parser/javacc/JavaCCParser.jj | 12 +++---- .../parser/javacc/JavaCCParserBase.java | 34 +++++++++++++++---- 3 files changed, 38 insertions(+), 16 deletions(-) diff --git 
a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index cec1ad19e..0bf66066d 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -32,7 +32,7 @@ import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; @@ -129,12 +129,14 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLit * * @param lexicalForm the (unescaped) lexical form of the constant. * @param datatype the datatype, or null if not present. + * @param termFactory the {@link TermFactory} to use for creating the result * * @throws ParsingException when the lexical form is invalid for the given data * type. * @return the {@link Constant} corresponding to the given arguments. */ - public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { + public Constant parseDatatypeConstant(final String lexicalForm, final String datatype, + final TermFactory termFactory) throws ParsingException { final String type = ((datatype != null) ? 
datatype : PrefixDeclarationRegistry.XSD_STRING); final DatatypeConstantHandler handler = this.datatypes.get(type); @@ -142,7 +144,7 @@ public Constant parseDatatypeConstant(final String lexicalForm, final String dat return handler.createConstant(lexicalForm); } - return Expressions.makeDatatypeConstant(lexicalForm, type); + return termFactory.makeDatatypeConstant(lexicalForm, type); } /** diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 3dbff82a7..1563b9a6c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -257,7 +257,7 @@ PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclaration String predicateName; } { predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { - return Expressions.makePositiveLiteral(predicateName, terms); + return Expressions.makePositiveLiteral(createPredicate(predicateName,terms.size()), terms); } } @@ -268,7 +268,7 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException : { } { predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > { try { - return Expressions.makeFact(predicateName, terms); + return Expressions.makeFact(createPredicate(predicateName,terms.size()), terms); } catch (IllegalArgumentException e) { throw makeParseExceptionWithCause("Error parsing fact: " + e.getMessage(), e); } @@ -280,7 +280,7 @@ NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclaration String predicateName; } { < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { - return Expressions.makeNegativeLiteral(predicateName, terms); + return Expressions.makeNegativeLiteral(createPredicate(predicateName,terms.size()), terms); 
} } @@ -325,7 +325,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { headUniVars.add(s); else if (context == FormulaContext.BODY) bodyVars.add(s); - return Expressions.makeUniversalVariable(s); + return createUniversalVariable(s); } | t = < EXIVAR > { s = t.image.substring(1); @@ -333,7 +333,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { headExiVars.add(s); if (context == FormulaContext.BODY) throw new ParseException("Existentialy quantified variables can not appear in the body. Line: " + t.beginLine + ", Column: "+ t.beginColumn); - return Expressions.makeExistentialVariable(s); + return createExistentialVariable(s); } | try { tt = ConfigurableLiteral () { return tt; } @@ -357,7 +357,7 @@ Constant RDFLiteral() throws PrefixDeclarationException : { } { lex = String() ( lang = < LANGTAG > | < DATATYPE > dt = absoluteIri() )? { if (lang != null) { - return Expressions.makeLanguageStringConstant(lex, lang.image); + return createLanguageStringConstant(lex, lang.image); } return createConstant(lex, dt); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index f367bb382..458a849fd 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -28,14 +28,17 @@ import org.semanticweb.rulewerk.core.model.api.Argument; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import 
org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Statement; import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; @@ -64,6 +67,7 @@ public class JavaCCParserBase { private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; private Skolemization skolemization = new Skolemization(); + private TermFactory termFactory = new TermFactory(); /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -143,7 +147,7 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { } catch (PrefixDeclarationException e) { throw makeParseExceptionWithCause("Failed to parse IRI", e); } - return Expressions.makeAbstractConstant(absoluteIri); + return termFactory.makeAbstractConstant(absoluteIri); } /** @@ -155,16 +159,32 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { */ Constant createConstant(String lexicalForm, String datatype) throws ParseException { try { - return parserConfiguration.parseDatatypeConstant(lexicalForm, datatype); + return parserConfiguration.parseDatatypeConstant(lexicalForm, datatype, termFactory); } catch (ParsingException e) { throw makeParseExceptionWithCause("Failed to parse Constant", e); } } - NamedNull createNamedNull(String lexicalForm) throws ParseException { + NamedNull createNamedNull(String lexicalForm) { return this.skolemization.getRenamedNamedNull(lexicalForm); } + UniversalVariable createUniversalVariable(String name) { + return termFactory.makeUniversalVariable(name); + } + + ExistentialVariable createExistentialVariable(String name) { + return termFactory.makeExistentialVariable(name); + } + + LanguageStringConstant createLanguageStringConstant(String string, String languageTag) { + return termFactory.makeLanguageStringConstant(string, languageTag); + } + + Predicate createPredicate(String name, int arity) { + return termFactory.makePredicate(name, arity); + } + void addStatement(Statement statement) { knowledgeBase.addStatement(statement); } @@ -178,7 +198,7 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw } } - Predicate predicate = Expressions.makePredicate(predicateName, arity); + Predicate predicate = termFactory.makePredicate(predicateName, arity); addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } @@ -345,8 +365,8 @@ Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syn return 
parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); } - KnowledgeBase parseDirectiveStatement(String name, List arguments, - SubParserFactory subParserFactory) throws ParseException { + KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) + throws ParseException { try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); } catch (ParsingException e) { From 274bb07d3cc1850ec152b124bfecd9c13ae1e2cb Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 16:15:09 +0200 Subject: [PATCH 1041/1255] license header --- .../model/implementation/TermFactoryTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java index 388289c6d..7719f9efa 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.model.implementation; +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.*; import org.junit.Test; From 33fbdfe8e6108957c89b99aae4acfd54785f6743 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 17:30:18 +0200 Subject: [PATCH 1042/1255] use BufferedWriter for 100 times speedup --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index 447a50ca2..b9e7ef7e3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -1,5 +1,6 @@ package org.semanticweb.rulewerk.commands; +import java.io.BufferedWriter; import java.io.ByteArrayInputStream; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -242,7 +243,7 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma * @throws FileNotFoundException */ public Writer getFileWriter(String fileName) throws FileNotFoundException { - return new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8); + return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)); } /** From 41e9f333777578856a7f06cabfecdf8834fd97a9 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 18:03:34 +0200 Subject: [PATCH 1043/1255] support % in strings --- .../semanticweb/rulewerk/parser/javacc/JavaCCParser.jj | 2 +- .../rulewerk/parser/RuleParserParseFactTest.java | 4 ++-- .../org/semanticweb/rulewerk/parser/RuleParserTest.java | 8 ++++++++ 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index 
1563b9a6c..cc7568888 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -487,7 +487,7 @@ String PrefixedName() throws PrefixDeclarationException : { } // Comments -< * > SKIP : { +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > SKIP : { < COMMENT : "%" ( ~[ "\n" ] )* "\n" > } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java index d39446d31..378382700 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java @@ -33,7 +33,7 @@ public class RuleParserParseFactTest implements ParserTestUtils { private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarationRegistry.XSD_STRING); - private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarationRegistry.XSD_STRING); + private final Constant b = Expressions.makeDatatypeConstant("b%c", PrefixDeclarationRegistry.XSD_STRING); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); @@ -45,7 +45,7 @@ public void parseFact_string_succeeds() throws ParsingException { @Test public void parseFact_twoStrings_succeeds() throws ParsingException { - assertEquals(factAB, RuleParser.parseFact("p(\"a\",\"b\") .")); + assertEquals(factAB, RuleParser.parseFact("p(\"a\",\"b%c\") .")); } @Test(expected = ParsingException.class) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 42041cc6d..a7b3be68a 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ 
b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -139,6 +139,14 @@ public void testSimpleRule() throws ParsingException { assertEquals(Arrays.asList(rule1), statements); } + @Test + public void testFactWithCommentSymbol() throws ParsingException { + String input = "t(\"%test\") . "; + ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(Expressions.makeFact("t", + Expressions.makeDatatypeConstant("%test", PrefixDeclarationRegistry.XSD_STRING))), statements); + } + @Test public void testNegationRule() throws ParsingException { String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; From 676c4092717b636ee311a9d468298a56a4646bc1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 28 Aug 2020 21:27:55 +0200 Subject: [PATCH 1044/1255] correct error reporting --- .../rulewerk/reasoner/vlog/VLogReasoner.java | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java index f49bfef7a..7547ae05c 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -145,7 +145,7 @@ public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { public RuleRewriteStrategy getRuleRewriteStrategy() { return this.ruleRewriteStrategy; } - + @Override public Correctness getCorrectness() { return this.correctness; @@ -370,11 +370,7 @@ private void runChase() { } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { - // FIXME: the message generated here is not guaranteed to be the correct - // interpretation of the exception that is caught - 
throw new RulewerkRuntimeException( - "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", - e); + throw new RulewerkRuntimeException("VLog encounterd an error during materialization: " + e.getMessage(), e); } if (this.reasoningCompleted) { @@ -504,7 +500,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St } catch (final NotStartedException e) { throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { - final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + final Correctness correctness = this.getCorrectnessUnknownPredicate(query); this.logWarningOnCorrectness(correctness); return correctness; } @@ -515,7 +511,8 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St private void validateBeforeQuerying(final PositiveLiteral query) { this.validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not allowed before Reasoner#reason() was first called!"); + throw new ReasonerStateException(this.reasonerState, + "Querying is not allowed before Reasoner#reason() was first called!"); } Validate.notNull(query, "Query atom must not be null!"); } From cb491623884a5e7f8f908af4e4bac6f4af215b24 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 30 Aug 2020 19:00:56 +0200 Subject: [PATCH 1045/1255] test InteractiveShell run, mock terminal configuration --- .../rulewerk/client/picocli/Main.java | 7 ++-- ...on.java => DefaultShellConfiguration.java} | 40 +++++++++++-------- ...Shell.java => InteractiveShellClient.java} | 12 +++--- .../client/shell/ShellConfiguration.java | 17 ++++++++ ...ava => DefaultShellConfigurationTest.java} | 6 +-- ...t.java => InteractiveShellClientTest.java} | 35 +++++++++++++++- .../rulewerk/client/shell/ShellTestUtils.java 
| 11 +++++ 7 files changed, 97 insertions(+), 31 deletions(-) rename rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/{DefaultConfiguration.java => DefaultShellConfiguration.java} (71%) rename rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/{InteractiveShell.java => InteractiveShellClient.java} (81%) create mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java rename rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/{DefaultConfigurationTest.java => DefaultShellConfigurationTest.java} (82%) rename rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/{InteractiveShellTest.java => InteractiveShellClientTest.java} (51%) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 409bd3a5b..0772d76e7 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -26,7 +26,8 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; -import org.semanticweb.rulewerk.client.shell.InteractiveShell; +import org.semanticweb.rulewerk.client.shell.DefaultShellConfiguration; +import org.semanticweb.rulewerk.client.shell.InteractiveShellClient; import picocli.CommandLine; import picocli.CommandLine.Command; @@ -38,7 +39,7 @@ * @author Irina Dragoste * */ -@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShell.class, +@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShellClient.class, RulewerkClientMaterialize.class }) public class Main { @@ -46,7 +47,7 @@ public static void main(final String[] args) throws IOException { configureLogging(); if (args.length == 0 || (args.length > 
0 && args[0].equals("shell"))) { - new InteractiveShell().run(); + new InteractiveShellClient().run(new DefaultShellConfiguration()); } else { if (args[0].equals("materialize")) { final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java similarity index 71% rename from rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index 08b7242e5..cda67f041 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -38,26 +38,32 @@ import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.Interpreter; -public final class DefaultConfiguration { +public class DefaultShellConfiguration implements ShellConfiguration { - private DefaultConfiguration() { + public static final String PROMPT_STRING = "rulewerk> "; + + @Override + public LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { + final LineReaderBuilder lineReaderBuilder = this.getDefaultLineReaderConfiguration(terminal); + + lineReaderBuilder.terminal(terminal); + lineReaderBuilder.completer(this.buildCompleter(interpreter)); + + return lineReaderBuilder.build(); } - public static LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { - final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder().terminal(terminal) + LineReaderBuilder getDefaultLineReaderConfiguration(final Terminal terminal) { + final LineReaderBuilder lineReaderBuilder = LineReaderBuilder.builder() .appName("Rulewerk Shell"); /* 
* This allows completion on an empty buffer, rather than inserting a tab */ lineReaderBuilder.option(LineReader.Option.INSERT_TAB, false); lineReaderBuilder.option(LineReader.Option.AUTO_FRESH_LINE, true); - - lineReaderBuilder.completer(buildCompleter(interpreter)); - - return lineReaderBuilder.build(); + return lineReaderBuilder; } - private static Completer buildCompleter(final Interpreter interpreter) { + Completer buildCompleter(final Interpreter interpreter) { // @load and @export commands require a file name as argument final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); @@ -76,21 +82,23 @@ private static Completer buildCompleter(final Interpreter interpreter) { return new TreeCompleter(nodes); } - public static Terminal buildTerminal() throws IOException { - return getDefaultTerminalConfiguration().build(); + @Override + public Terminal buildTerminal() throws IOException { + return this.getDefaultTerminalConfiguration().build(); } - static TerminalBuilder getDefaultTerminalConfiguration() { + TerminalBuilder getDefaultTerminalConfiguration() { return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true); } - public static String buildPrompt(final Terminal terminal) { - return getDefaultPromptStyle().toAnsi(terminal); + @Override + public String buildPrompt(final Terminal terminal) { + return this.getDefaultPromptStyle().toAnsi(terminal); } - static AttributedString getDefaultPromptStyle() { + AttributedString getDefaultPromptStyle() { final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); - return new AttributedString("rulewerk> ", promptStyle); + return new AttributedString(PROMPT_STRING, promptStyle); } } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java similarity index 81% rename from 
rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java index 0eaa6dde1..4c2a9f59c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java @@ -32,20 +32,18 @@ import picocli.CommandLine.Command; @Command(name = "shell", description = "An interactive shell for Rulewerk. The default command.") -public class InteractiveShell -//implements Runnable +public class InteractiveShellClient { -// @Override - public void run() throws IOException { + public void run(final ShellConfiguration configuration) throws IOException { - final Terminal terminal = DefaultConfiguration.buildTerminal(); + final Terminal terminal = configuration.buildTerminal(); try (Interpreter interpreter = this.initializeInterpreter(terminal)) { final Shell shell = new Shell(interpreter); - final LineReader lineReader = DefaultConfiguration.buildLineReader(terminal, interpreter); - final String prompt = DefaultConfiguration.buildPrompt(terminal); + final LineReader lineReader = configuration.buildLineReader(terminal, interpreter); + final String prompt = configuration.buildPrompt(terminal); shell.run(lineReader, prompt); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java new file mode 100644 index 000000000..bb331ae77 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -0,0 +1,17 @@ +package org.semanticweb.rulewerk.client.shell; + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; + +public interface 
ShellConfiguration { + + LineReader buildLineReader(Terminal terminal, Interpreter interpreter); + + Terminal buildTerminal() throws IOException; + + String buildPrompt(Terminal terminal); + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java similarity index 82% rename from rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index 81ca2e0e1..e7cab36e7 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -28,11 +28,11 @@ import org.junit.Test; import org.mockito.Mockito; -public class DefaultConfigurationTest { +public class DefaultShellConfigurationTest { @Test public void buildPromptProvider() { - final AttributedString promptProvider = DefaultConfiguration.getDefaultPromptStyle(); + final AttributedString promptProvider = new DefaultShellConfiguration().getDefaultPromptStyle(); assertEquals("rulewerk> ", promptProvider.toString()); } @@ -40,7 +40,7 @@ public void buildPromptProvider() { public void buildPrompt() { final Terminal terminal = Mockito.mock(Terminal.class); Mockito.when(terminal.getType()).thenReturn(Terminal.TYPE_DUMB); - final String string = DefaultConfiguration.buildPrompt(terminal); + final String string = new DefaultShellConfiguration().buildPrompt(terminal); assertTrue(string.length() >= 10); } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java similarity index 51% rename from 
rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java index 566ead3a1..dddd998ed 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java @@ -23,15 +23,19 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import java.io.IOException; import java.io.PrintWriter; +import java.io.StringWriter; +import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +import org.jline.terminal.impl.DumbTerminal; import org.junit.Test; import org.mockito.Mockito; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; -public class InteractiveShellTest { +public class InteractiveShellClientTest { @Test public void initializeInterpreter() { @@ -39,7 +43,7 @@ public void initializeInterpreter() { final PrintWriter writer = Mockito.mock(PrintWriter.class); Mockito.when(terminal.writer()).thenReturn(writer); - final InteractiveShell interactiveShell = new InteractiveShell(); + final InteractiveShellClient interactiveShell = new InteractiveShellClient(); final Interpreter interpreter = interactiveShell.initializeInterpreter(terminal); assertTrue(interpreter.getParserConfiguration() instanceof DefaultParserConfiguration); @@ -47,4 +51,31 @@ public void initializeInterpreter() { assertEquals(writer, interpreter.getWriter()); } + @Test + public void run_mockConfiguration() throws IOException { + final ShellConfiguration configuration = Mockito.mock(ShellConfiguration.class); + final Terminal terminal = Mockito.mock(DumbTerminal.class); + final StringWriter output = new StringWriter(); + final PrintWriter printWriter = new PrintWriter(output); + 
Mockito.when(terminal.writer()).thenReturn(printWriter); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine("prompt")).thenReturn("help", "exit"); + + Mockito.when(configuration.buildTerminal()).thenReturn(terminal); + Mockito.when(configuration.buildPrompt(terminal)).thenReturn("prompt"); + Mockito.when(configuration.buildLineReader(Mockito.eq(terminal), Mockito.any(Interpreter.class))) + .thenReturn(lineReader); + + final InteractiveShellClient shellClient = new InteractiveShellClient(); + shellClient.run(configuration); + + assertTrue(output.toString().contains("Welcome to the Rulewerk interactive shell.")); + + assertTrue(output.toString().contains("Available commands:")); + + assertTrue(output.toString().contains("Exiting Rulewerk")); + } + + } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index 189a83607..fb04f355a 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -54,6 +54,17 @@ public static Interpreter getMockInterpreter(final Writer writer) { }, terminalStyledPrinter, parserConfiguration); } + public static Interpreter getMockInterpreter(final Writer writer, final Terminal terminal) { + final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminal); + + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + final Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, terminalStyledPrinter, parserConfiguration); + } + public static void testIsExitCommand(final Command command) { 
assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); assertTrue(command.getArguments().isEmpty()); From 178c92fb9cca62947189a613a4bee1115c50700c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 30 Aug 2020 19:08:17 +0200 Subject: [PATCH 1046/1255] license header --- .../client/shell/ShellConfiguration.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java index bb331ae77..5a0d7adab 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.client.shell; +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.jline.reader.LineReader; From e7638b4b92d8efc9f030df450ed9325011c29a31 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 31 Aug 2020 11:07:01 +0200 Subject: [PATCH 1047/1255] mock exception thrown in unit test --- .../semanticweb/rulewerk/client/shell/ShellTest.java | 11 ++++++----- .../rulewerk/client/shell/ShellTestUtils.java | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java index d5f1d3e18..8c3e81448 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -294,10 +294,11 @@ public void runCommand_exceptionDuringReading() throws CommandExecutionException final Shell shell = new Shell(interpreterSpy); final LineReader lineReader = Mockito.mock(LineReader.class); - final RuntimeException runtimeException = new RuntimeException("test"); - final RuntimeException runtimeExceptionSpy = Mockito.spy(runtimeException); + final RuntimeException exception = Mockito.mock(RuntimeException.class); + Mockito.when(exception.getMessage()) + .thenReturn("This exception is thrown intentionally as part of a unit test"); - Mockito.when(lineReader.readLine(this.prompt)).thenThrow(runtimeExceptionSpy); + Mockito.when(lineReader.readLine(this.prompt)).thenThrow(exception); final Command command = shell.runCommand(lineReader, this.prompt); assertNull(command); @@ -305,9 +306,9 @@ public void runCommand_exceptionDuringReading() throws CommandExecutionException Mockito.verify(interpreterSpy, Mockito.never()).runCommand(Mockito.any(Command.class)); final String printedResult = writer.toString(); - assertTrue(printedResult.startsWith("Unexpected error: " + runtimeException.getMessage())); + 
assertTrue(printedResult.startsWith("Unexpected error: " + exception.getMessage())); - Mockito.verify(runtimeExceptionSpy).printStackTrace(); + Mockito.verify(exception).printStackTrace(); } public void testPrintWelcome(final Interpreter interpreterSpy) { diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index fb04f355a..9d45cb02b 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -54,7 +54,7 @@ public static Interpreter getMockInterpreter(final Writer writer) { }, terminalStyledPrinter, parserConfiguration); } - public static Interpreter getMockInterpreter(final Writer writer, final Terminal terminal) { + public static Interpreter getMockInterpreter(final Terminal terminal) { final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminal); final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); From 32cff14e83ca3465ba68f0c1fcdb36dddb45eacc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 31 Aug 2020 13:48:35 +0200 Subject: [PATCH 1048/1255] improved exception error reporting --- .../rulewerk/parser/directives/ImportFileDirectiveHandler.java | 2 +- .../semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 2580a2b56..403238df4 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -54,7 +54,7 @@ public KnowledgeBase handleDirective(List arguments, final SubParserFa RuleParser.parseInto(kb, stream, parserConfiguration); }); } catch (RulewerkException | IOException | IllegalArgumentException e) { - throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); + throw new ParsingException("Could not import rules file \"" + file.getName() + "\": " + e.getMessage(), e); } return knowledgeBase; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 458a849fd..981632edf 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -370,7 +370,7 @@ KnowledgeBase parseDirectiveStatement(String name, List arguments, Sub try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); } catch (ParsingException e) { - throw makeParseExceptionWithCause("Failed while trying to parse directive statement", e); + throw makeParseExceptionWithCause(e.getMessage(), e); } } From 6dab3cb170ea1a99f98a5f88cdd5c3e9de71d683 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Tue, 1 Sep 2020 10:14:00 +0200 Subject: [PATCH 1049/1255] improved spelling --- .../rulewerk/commands/LoadCommandInterpreter.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 62878f8be..952f3060c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -96,6 +96,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); + int countDataSourceDeclarationsBefore = interpreter.getKnowledgeBase().getDataSourceDeclarations().size(); if (TASK_RLS.equals(task)) { loadKb(interpreter, fileName); @@ -108,9 +109,11 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio "Unknown task " + task + ". Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); } - interpreter.printNormal( - "Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + " new fact(s) and " - + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + " new rule(s).\n"); + interpreter.printNormal("Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + + " new fact(s), " + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + + " new rule(s), and " + (interpreter.getKnowledgeBase().getDataSourceDeclarations().size() + - countDataSourceDeclarationsBefore) + + " new datasource declaration(s).\n"); } From 914a30021ae8023837ef2d606ba6b38dd962d62b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 20:01:45 +0200 Subject: [PATCH 1050/1255] test completers --- .../shell/DefaultShellConfiguration.java | 29 +++-- .../client/shell/InteractiveShellClient.java | 2 +- .../rulewerk/client/shell/Shell.java | 13 ++- .../client/shell/ShellConfiguration.java | 4 +- .../shell/DefaultShellConfigurationTest.java | 102 ++++++++++++++++++ .../shell/InteractiveShellClientTest.java | 3 +- .../rulewerk/client/shell/ShellTestUtils.java | 8 +- .../rulewerk/commands/Interpreter.java | 22 ++-- 8 files changed, 141 insertions(+), 42 deletions(-) diff --git 
a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index cda67f041..d108ea6c0 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -22,13 +22,12 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.List; -import java.util.Set; import org.jline.builtins.Completers; import org.jline.builtins.Completers.FileNameCompleter; import org.jline.builtins.Completers.TreeCompleter; -import org.jline.reader.Completer; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; import org.jline.reader.impl.completer.StringsCompleter; @@ -36,25 +35,24 @@ import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; -import org.semanticweb.rulewerk.commands.Interpreter; public class DefaultShellConfiguration implements ShellConfiguration { public static final String PROMPT_STRING = "rulewerk> "; @Override - public LineReader buildLineReader(final Terminal terminal, final Interpreter interpreter) { - final LineReaderBuilder lineReaderBuilder = this.getDefaultLineReaderConfiguration(terminal); + public LineReader buildLineReader(final Terminal terminal, final Collection registeredCommands) { + final LineReaderBuilder lineReaderBuilder = this.getDefaultLineReaderConfiguration(LineReaderBuilder.builder()); lineReaderBuilder.terminal(terminal); - lineReaderBuilder.completer(this.buildCompleter(interpreter)); + lineReaderBuilder.completer(this.buildCompleter(registeredCommands)); return lineReaderBuilder.build(); } - LineReaderBuilder getDefaultLineReaderConfiguration(final Terminal terminal) { - final LineReaderBuilder lineReaderBuilder = 
LineReaderBuilder.builder() - .appName("Rulewerk Shell"); + LineReaderBuilder getDefaultLineReaderConfiguration(final LineReaderBuilder lineReaderBuilder) { + + lineReaderBuilder.appName("Rulewerk Shell"); /* * This allows completion on an empty buffer, rather than inserting a tab */ @@ -63,18 +61,17 @@ LineReaderBuilder getDefaultLineReaderConfiguration(final Terminal terminal) { return lineReaderBuilder; } - Completer buildCompleter(final Interpreter interpreter) { + TreeCompleter buildCompleter(final Collection registeredCommands) { // @load and @export commands require a file name as argument final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); - final Set registeredCommandNames = interpreter.getRegisteredCommands(); final List nodes = new ArrayList<>(); - registeredCommandNames.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { + registeredCommands.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { if (serializedCommandName.equals("@load")) { nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); } else if (serializedCommandName.equals("@help")) { nodes.add(TreeCompleter.node(serializedCommandName, - TreeCompleter.node(new StringsCompleter(registeredCommandNames)))); + TreeCompleter.node(new StringsCompleter(registeredCommands)))); } else { nodes.add(TreeCompleter.node(serializedCommandName)); } @@ -84,11 +81,11 @@ Completer buildCompleter(final Interpreter interpreter) { @Override public Terminal buildTerminal() throws IOException { - return this.getDefaultTerminalConfiguration().build(); + return this.getDefaultTerminalConfiguration(TerminalBuilder.builder()).build(); } - TerminalBuilder getDefaultTerminalConfiguration() { - return TerminalBuilder.builder().dumb(true).jansi(true).jna(false).system(true); + TerminalBuilder getDefaultTerminalConfiguration(final TerminalBuilder terminalBuilder) { + return 
terminalBuilder.dumb(true).jansi(true).jna(false).system(true); } @Override diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java index 4c2a9f59c..cd62e53a5 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java @@ -42,7 +42,7 @@ public void run(final ShellConfiguration configuration) throws IOException { try (Interpreter interpreter = this.initializeInterpreter(terminal)) { final Shell shell = new Shell(interpreter); - final LineReader lineReader = configuration.buildLineReader(terminal, interpreter); + final LineReader lineReader = configuration.buildLineReader(terminal, shell.getRegisteredCommands()); final String prompt = configuration.buildPrompt(terminal); shell.run(lineReader, prompt); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index e4c82fc63..665a0a1df 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -1,9 +1,5 @@ package org.semanticweb.rulewerk.client.shell; -import org.jline.reader.EndOfFileException; -import org.jline.reader.LineReader; -import org.jline.reader.UserInterruptException; - /*- * #%L * Rulewerk Client @@ -24,6 +20,11 @@ * #L% */ +import java.util.Set; + +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; import 
org.semanticweb.rulewerk.commands.CommandExecutionException; @@ -150,4 +151,8 @@ boolean isRunning() { return this.running; } + public Set getRegisteredCommands() { + return this.interpreter.getRegisteredCommands(); + } + } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java index 5a0d7adab..9c83ae976 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -21,14 +21,14 @@ */ import java.io.IOException; +import java.util.Collection; import org.jline.reader.LineReader; import org.jline.terminal.Terminal; -import org.semanticweb.rulewerk.commands.Interpreter; public interface ShellConfiguration { - LineReader buildLineReader(Terminal terminal, Interpreter interpreter); + LineReader buildLineReader(Terminal terminal, Collection commands); Terminal buildTerminal() throws IOException; diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index e7cab36e7..87f369cd9 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -21,15 +21,37 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.jline.builtins.Completers.TreeCompleter; +import org.jline.reader.Candidate; 
+import org.jline.reader.LineReader; +import org.jline.reader.ParsedLine; import org.jline.terminal.Terminal; import org.jline.utils.AttributedString; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import org.mockito.Mockito; public class DefaultShellConfigurationTest { + @Rule + public TemporaryFolder folder = new TemporaryFolder(new File(".")); + + public static final List SHELL_COMMANDS = Arrays.asList("help", "load", "assert", "retract", "addsource", + "delsource", "setprefix", "clear", + "reason", "query", "export", "showkb", "exit"); + @Test public void buildPromptProvider() { final AttributedString promptProvider = new DefaultShellConfiguration().getDefaultPromptStyle(); @@ -44,4 +66,84 @@ public void buildPrompt() { assertTrue(string.length() >= 10); } + @Test + public void buildCompleterEmptyLine() { + final ArrayList readWords = new ArrayList(); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + final Set expectedCandidates = SHELL_COMMANDS.stream().map(c -> "@" + c).collect(Collectors.toSet()); + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterHelp() { + final ArrayList readWords = new ArrayList(); + readWords.add("@help"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + final Set expectedCandidates = new HashSet(SHELL_COMMANDS); + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterLoad() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + } + + private Set getCompleterCandidates(final ArrayList readWords, final String wordToComplete) { + final List candidates = new ArrayList<>(); + + final TreeCompleter completer = new 
DefaultShellConfiguration().buildCompleter(SHELL_COMMANDS); + final LineReader reader = Mockito.mock(LineReader.class); + + final ParsedLine parsedLine = this.makeParsedLine(readWords, wordToComplete); + completer.complete(reader, parsedLine, candidates); + return candidates.stream().map(c -> c.value()).collect(Collectors.toSet()); + } + + + private ParsedLine makeParsedLine(final List readWords, final String wordToComplete) { + final ParsedLine parsedLine = new ParsedLine() { + + @Override + public List words() { + return readWords; + } + + @Override + public int wordIndex() { + return readWords.size(); + } + + @Override + public int wordCursor() { + return this.word().length(); + } + + @Override + public String word() { + return wordToComplete; + } + + @Override + public String line() { + // Only used by PipedlineCompleter + return null; + } + + @Override + public int cursor() { + return this.line().length(); + } + }; + return parsedLine; + } + } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java index dddd998ed..e37722070 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java @@ -31,6 +31,7 @@ import org.jline.terminal.Terminal; import org.jline.terminal.impl.DumbTerminal; import org.junit.Test; +import org.mockito.ArgumentMatchers; import org.mockito.Mockito; import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; @@ -64,7 +65,7 @@ public void run_mockConfiguration() throws IOException { Mockito.when(configuration.buildTerminal()).thenReturn(terminal); Mockito.when(configuration.buildPrompt(terminal)).thenReturn("prompt"); - 
Mockito.when(configuration.buildLineReader(Mockito.eq(terminal), Mockito.any(Interpreter.class))) + Mockito.when(configuration.buildLineReader(Mockito.eq(terminal), ArgumentMatchers.anyCollection())) .thenReturn(lineReader); final InteractiveShellClient shellClient = new InteractiveShellClient(); diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java index 9d45cb02b..49d1a7250 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -42,16 +42,10 @@ private ShellTestUtils() { public static Interpreter getMockInterpreter(final Writer writer) { final Terminal terminalMock = Mockito.mock(Terminal.class); - final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminalMock); final PrintWriter printWriter = new PrintWriter(writer); Mockito.when(terminalMock.writer()).thenReturn(printWriter); - final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); - return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { - final Reasoner reasoner = Mockito.mock(Reasoner.class); - Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); - return reasoner; - }, terminalStyledPrinter, parserConfiguration); + return getMockInterpreter(terminalMock); } public static Interpreter getMockInterpreter(final Terminal terminal) { diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index b9e7ef7e3..daecfcddd 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -81,7 +81,7 @@ 
public Interpreter(final KnowledgeBaseProvider knowledgeBaseProvider, final Reas final StyledPrinter printer, final ParserConfiguration parserConfiguration) { this.knowledgeBaseProvider = knowledgeBaseProvider; this.reasonerProvider = reasonerProvider; - clearReasonerAndKnowledgeBase(); + this.clearReasonerAndKnowledgeBase(); this.printer = printer; this.parserConfiguration = parserConfiguration; this.registerDefaultCommandInterpreters(); @@ -242,7 +242,7 @@ public static PositiveLiteral extractPositiveLiteralArgument(final Command comma * @return * @throws FileNotFoundException */ - public Writer getFileWriter(String fileName) throws FileNotFoundException { + public Writer getFileWriter(final String fileName) throws FileNotFoundException { return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)); } @@ -253,7 +253,7 @@ public Writer getFileWriter(String fileName) throws FileNotFoundException { * @return * @throws FileNotFoundException */ - public InputStream getFileInputStream(String fileName) throws FileNotFoundException { + public InputStream getFileInputStream(final String fileName) throws FileNotFoundException { return new FileInputStream(fileName); } @@ -262,11 +262,11 @@ public InputStream getFileInputStream(String fileName) throws FileNotFoundExcept * statements are cleared. 
*/ public void clearReasonerAndKnowledgeBase() { - closeReasoner(); - reasoner = reasonerProvider.reasoner(knowledgeBaseProvider.knowledgeBase()); + this.closeReasoner(); + this.reasoner = this.reasonerProvider.reasoner(this.knowledgeBaseProvider.knowledgeBase()); try { - reasoner.reason(); - } catch (IOException e) { + this.reasoner.reason(); + } catch (final IOException e) { throw new RulewerkRuntimeException("Failed to initialise reasoner: " + e.getMessage(), e); } } @@ -276,16 +276,16 @@ public void clearReasonerAndKnowledgeBase() { */ @Override public void close() { - closeReasoner(); + this.closeReasoner(); } /** * Closes and discards the internal {@link Reasoner}. */ private void closeReasoner() { - if (reasoner != null) { - reasoner.close(); - reasoner = null; + if (this.reasoner != null) { + this.reasoner.close(); + this.reasoner = null; } } From 1c69d5ce6559d7df1a22f39fddf4d9ead06ec524 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:46:37 +0200 Subject: [PATCH 1051/1255] delete test output file binaryFacts.csv --- rulewerk-vlog/src/test/data/output/binaryFacts.csv | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/binaryFacts.csv diff --git a/rulewerk-vlog/src/test/data/output/binaryFacts.csv b/rulewerk-vlog/src/test/data/output/binaryFacts.csv deleted file mode 100644 index bcaabc2bc..000000000 --- a/rulewerk-vlog/src/test/data/output/binaryFacts.csv +++ /dev/null @@ -1,2 +0,0 @@ -c1,c2 -c3,c4 From bdf89b0fddcdebe198f5477a5235ef83268892a1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:47:20 +0200 Subject: [PATCH 1052/1255] delete unit test output file --- rulewerk-vlog/src/test/data/output/exclude_blanks.csv | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/exclude_blanks.csv diff --git a/rulewerk-vlog/src/test/data/output/exclude_blanks.csv b/rulewerk-vlog/src/test/data/output/exclude_blanks.csv deleted file 
mode 100644 index e69de29bb..000000000 From fdefff5db5cf91cbaf98f7fd7084b32372b278c4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:47:38 +0200 Subject: [PATCH 1053/1255] delete unit test output file --- rulewerk-vlog/src/test/data/output/include_blanks.csv | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/include_blanks.csv diff --git a/rulewerk-vlog/src/test/data/output/include_blanks.csv b/rulewerk-vlog/src/test/data/output/include_blanks.csv deleted file mode 100644 index e502cf529..000000000 --- a/rulewerk-vlog/src/test/data/output/include_blanks.csv +++ /dev/null @@ -1,2 +0,0 @@ -c,1_2_0 -c,1_3_0 From 3969b5868f78841e74914bdf49a36e59e2d2b3b2 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 1 Sep 2020 22:47:59 +0200 Subject: [PATCH 1054/1255] delete unit test output file --- rulewerk-vlog/src/test/data/output/unaryFacts.csv | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 rulewerk-vlog/src/test/data/output/unaryFacts.csv diff --git a/rulewerk-vlog/src/test/data/output/unaryFacts.csv b/rulewerk-vlog/src/test/data/output/unaryFacts.csv deleted file mode 100644 index d0aaf976a..000000000 --- a/rulewerk-vlog/src/test/data/output/unaryFacts.csv +++ /dev/null @@ -1,2 +0,0 @@ -c1 -c2 From 8f54de9ed624413e16727b4455715e6d08b4ad9b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 2 Sep 2020 20:04:17 +0200 Subject: [PATCH 1055/1255] allow duplicate imports --- .../semanticweb/rulewerk/core/reasoner/KnowledgeBase.java | 8 ++++---- .../org/semanticweb/rulewerk/parser/RuleParserTest.java | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 6936811e7..a7bd76fc2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -490,10 +490,10 @@ public void importRulesFile(File file, AdditionalInputParser parseFunction) Validate.notNull(file, "file must not be null"); boolean isNewFile = this.importedFilePaths.add(file.getCanonicalPath()); - Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); - - try (InputStream stream = new FileInputStream(file)) { - parseFunction.parseInto(stream, this); + if (isNewFile) { + try (InputStream stream = new FileInputStream(file)) { + parseFunction.parseInto(stream, this); + } } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index a7b3be68a..3b3b77b60 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -536,15 +536,15 @@ public void parse_reusedNamedNulls_identical() throws ParsingException { assertArgumentIsNamedNull(fact3, 1); } - @Test(expected = ParsingException.class) - public void parseInto_duplicateImportStatements_throws() throws ParsingException { + @Test + public void parseInto_duplicateImportStatements_succeeds() throws ParsingException { String input = "@import \"src/test/resources/facts.rls\" . "; KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } - @Test(expected = ParsingException.class) - public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingException { + @Test + public void parseInto_duplicateRelativeImportStatements_succeeds() throws ParsingException { String input = "@import \"src/test/resources/facts.rls\" . 
@import-relative \"src/test/resources/facts.rls\" ."; KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); From 2cbc5022710f030f9ce1c326951aed62ea162faa Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 2 Sep 2020 20:08:50 +0200 Subject: [PATCH 1056/1255] set reasoner log level to Error --- .../java/org/semanticweb/rulewerk/commands/Interpreter.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index daecfcddd..c14dc1c4c 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -42,6 +42,7 @@ import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Terms; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; @@ -264,6 +265,7 @@ public InputStream getFileInputStream(final String fileName) throws FileNotFound public void clearReasonerAndKnowledgeBase() { this.closeReasoner(); this.reasoner = this.reasonerProvider.reasoner(this.knowledgeBaseProvider.knowledgeBase()); + this.reasoner.setLogLevel(LogLevel.ERROR); try { this.reasoner.reason(); } catch (final IOException e) { From 5ed733eff4974799b4b554b8a8f2eaecb54ac4c5 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 20:12:54 +0200 Subject: [PATCH 1057/1255] added javadoc --- .../rulewerk/client/picocli/Main.java | 57 ++++++++++++++----- .../client/picocli/RulewerkClient.java | 46 --------------- .../shell/DefaultShellConfiguration.java | 7 +++ 
.../client/shell/InteractiveShellClient.java | 20 ++++++- .../rulewerk/client/shell/Shell.java | 7 ++- .../client/shell/ShellConfiguration.java | 28 +++++++++ .../client/shell/TerminalStyledPrinter.java | 42 +++++++++----- .../commands/ExitCommandInterpreter.java | 17 ++++++ .../shell/InteractiveShellClientTest.java | 2 +- .../rulewerk/commands/StyledPrinter.java | 10 ++++ 10 files changed, 156 insertions(+), 80 deletions(-) delete mode 100644 rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index 0772d76e7..f8b59bb0a 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -21,6 +21,7 @@ */ import java.io.IOException; +import java.io.PrintStream; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; @@ -33,8 +34,8 @@ import picocli.CommandLine.Command; /** - * Dummy class with main method that is a command with subcommands shell and - * materialize + * Class with main method that is a command with subcommands {@code shell} + * (default) and {@code materialize}. * * @author Irina Dragoste * @@ -43,28 +44,56 @@ RulewerkClientMaterialize.class }) public class Main { + public static String INTERACTIVE_SHELL_COMMAND = "shell"; + public static String COMMAND_LINE_CLIENT_COMMAND = "materialize"; + public static String HELP_COMMAND = "help"; + + /** + * Launches the client application for Rulewerk. The functionality depends on + * the given command-line args ({@code args}): + *

    + *
  • empty args ("") or argument "shell"
  • launch an + * interactive shell. + *
  • argument "materialize" can be used with different options to complete + * several materialization and querying tasks from the command line.
  • + *
+ *
  • help
  • + * + * @param args + * + * @throws IOException + */ public static void main(final String[] args) throws IOException { configureLogging(); - - if (args.length == 0 || (args.length > 0 && args[0].equals("shell"))) { - new InteractiveShellClient().run(new DefaultShellConfiguration()); + + if (args.length == 0 || (args.length > 0 && INTERACTIVE_SHELL_COMMAND.equals(args[0]))) { + new InteractiveShellClient().launchShell(new DefaultShellConfiguration()); } else { - if (args[0].equals("materialize")) { + if (COMMAND_LINE_CLIENT_COMMAND.equals(args[0])) { final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); commandline.execute(args); } else { - if (!args[0].equals("help")) { - System.out.println("Invalid command."); - } - // TODO improve help - // TODO do we need to create a Help command? - (new CommandLine(new Main())).usage(System.out); - + displayHelp(args, System.out); } } + } + static void displayHelp(final String[] args, final PrintStream printStream) { + if (!HELP_COMMAND.equals(args[0])) { + printStream.println("Invalid command."); + } + + if (HELP_COMMAND.equals(args[0]) && args.length > 1 && COMMAND_LINE_CLIENT_COMMAND.equals(args[1])) { + (new CommandLine(new RulewerkClientMaterialize())).usage(printStream); + } else { + (new CommandLine(new Main())).usage(printStream); + } } - + + /** + * Configures {@link Logger} settings. Messages are logged to the console. Log + * level is set to {@link Level.FATAL}. + */ public static void configureLogging() { // Create the appender that will write log messages to the console. 
final ConsoleAppender consoleAppender = new ConsoleAppender(); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java deleted file mode 100644 index ee48b9beb..000000000 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.semanticweb.rulewerk.client.picocli; - -/*- - * #%L - * Rulewerk Client - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import picocli.CommandLine; - -import picocli.CommandLine.Command; - -/** - * Stand alone client for Rulewerk. 
- * - * @author Larry Gonzalez - * - */ -@Command(name = "java -jar RulewerkClient.jar", description = "RulewerkClient: A command line client for Rulewerk.", subcommands = { - RulewerkClientMaterialize.class }) -public class RulewerkClient implements Runnable { - - public static void main(String[] args) { - CommandLine commandline = new CommandLine(new RulewerkClient()); - commandline.execute(args); - } - - @Override - public void run() { - (new CommandLine(new RulewerkClient())).usage(System.out); - } -} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index d108ea6c0..bc93de9c6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -36,6 +36,13 @@ import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; +/** + * An implementation of {@link ShellConfiguration} with custom styling and + * completion for recognized commands. + * + * @author Irina Dragoste + * + */ public class DefaultShellConfiguration implements ShellConfiguration { public static final String PROMPT_STRING = "rulewerk> "; diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java index cd62e53a5..e74a7a189 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java @@ -31,18 +31,32 @@ import picocli.CommandLine.Command; -@Command(name = "shell", description = "An interactive shell for Rulewerk. 
The default command.") +/** + * Class for executing the default {@code shell} command, which launches an + * interactive shell. + * + * @author Irina Dragoste + * + */ +@Command(name = "shell", description = "Launch an interactive shell for Rulewerk. The default command.") public class InteractiveShellClient { - public void run(final ShellConfiguration configuration) throws IOException { + /** + * Builds and launches an interactive shell, which accepts commands for running + * Rulewerk tasks using VLog Reasosner. + * + * @param configuration for shell I/O resources + * @throws IOException if {@link Terminal} cannot be built. + */ + public void launchShell(final ShellConfiguration configuration) throws IOException { final Terminal terminal = configuration.buildTerminal(); try (Interpreter interpreter = this.initializeInterpreter(terminal)) { final Shell shell = new Shell(interpreter); - final LineReader lineReader = configuration.buildLineReader(terminal, shell.getRegisteredCommands()); + final LineReader lineReader = configuration.buildLineReader(terminal, shell.getCommands()); final String prompt = configuration.buildPrompt(terminal); shell.run(lineReader, prompt); diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java index 665a0a1df..103d6abb1 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -151,7 +151,12 @@ boolean isRunning() { return this.running; } - public Set getRegisteredCommands() { + /** + * Getter for the shell commands. + * + * @return the names of the commands that are recognized by this shell. 
+ */ + public Set getCommands() { return this.interpreter.getRegisteredCommands(); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java index 9c83ae976..fc9e42e04 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -26,12 +26,40 @@ import org.jline.reader.LineReader; import org.jline.terminal.Terminal; +/** + * Interface for providing I/O resources for an interactive shell: terminal, + * terminal prompt, and line reader + * + * @author Irina Dragoste + * + */ public interface ShellConfiguration { + /** + * Provides a line reader that reads user input from the given terminal. The + * line reader offers tab-completion for the given list of command names. + * + * @param terminal terminal to read from. + * @param commands list of command names recognized by the interactive shell. + * @return a line reader for interacting with the shell terminal. + */ LineReader buildLineReader(Terminal terminal, Collection commands); + /** + * Provides an I/O terminal for the interactive shell. + * + * @return the interactive shell terminal. + * @throws IOException when the terminal cannot be built + */ Terminal buildTerminal() throws IOException; + /** + * Provides the prompt text (with colour and style) to be displayed on the given + * terminal. + * + * @param terminal terminal for the prompt to be displayed on + * @return the prompt text with embedded style. 
+ */ String buildPrompt(Terminal terminal); } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java index d481f2c3a..4bf7e91ea 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java @@ -27,47 +27,59 @@ import org.jline.utils.AttributedStyle; import org.semanticweb.rulewerk.commands.StyledPrinter; +/** + * StyledPrinter that uses the {@link PrintWriter} of a {@link Terminal} and has + * various styling. + * + * @author Irina Dragoste + * + */ public class TerminalStyledPrinter implements StyledPrinter { final Terminal terminal; + /** + * Constructor providing a terminal for the StyledPrinter to write to. + * + * @param terminal the terminal to write to + */ public TerminalStyledPrinter(final Terminal terminal) { this.terminal = terminal; } @Override - public void printNormal(String string) { - printStyled(string, AttributedStyle.DEFAULT); + public void printNormal(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT); } @Override - public void printSection(String string) { - printStyled(string, AttributedStyle.DEFAULT.bold()); + public void printSection(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.bold()); } @Override - public void printEmph(String string) { - printStyled(string, AttributedStyle.DEFAULT.bold()); + public void printEmph(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.bold()); } @Override - public void printCode(String string) { - printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + public void printCode(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); } @Override - public 
void printImportant(String string) { - printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + public void printImportant(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); } @Override public PrintWriter getWriter() { - return terminal.writer(); + return this.terminal.writer(); } - private void printStyled(String string, AttributedStyle attributedStyle) { - AttributedString attributedString = new AttributedString(string, attributedStyle); - getWriter().print(attributedString.toAnsi(terminal)); - getWriter().flush(); + private void printStyled(final String string, final AttributedStyle attributedStyle) { + final AttributedString attributedString = new AttributedString(string, attributedStyle); + this.getWriter().print(attributedString.toAnsi(this.terminal)); + this.getWriter().flush(); } } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index 706275678..b6506b81b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -28,16 +28,33 @@ import org.semanticweb.rulewerk.commands.Interpreter; import org.semanticweb.rulewerk.core.model.api.Command; +/** + * Interpreter for the command to exit an interactive shell + * + * @author Irina Dragoste + * + */ public class ExitCommandInterpreter implements CommandInterpreter { public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>(0)); + /** + * Command names used for requesting exiting an interactive shell + * + * @author Irina Dragoste + * + */ public static enum ExitCommandName { exit; } final Shell shell; + /** + * Constructor that provides the 
interactive shell from which exit is requested + * + * @param shell interactive shell to exit from + */ public ExitCommandInterpreter(final Shell shell) { this.shell = shell; } diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java index e37722070..5d328751f 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java @@ -69,7 +69,7 @@ public void run_mockConfiguration() throws IOException { .thenReturn(lineReader); final InteractiveShellClient shellClient = new InteractiveShellClient(); - shellClient.run(configuration); + shellClient.launchShell(configuration); assertTrue(output.toString().contains("Welcome to the Rulewerk interactive shell.")); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java index 8e642b597..ebaf2867d 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java @@ -22,6 +22,12 @@ import java.io.Writer; +/** + * Interface for printing given Strings to a writer using different styles. 
+ * + * @author Irina Dragoste + * + */ public interface StyledPrinter { void printNormal(String string); @@ -34,6 +40,10 @@ public interface StyledPrinter { void printImportant(String string); + /** + * + * @return the writer to print to + */ Writer getWriter(); } From 765b08c06c333c07270a4bc448dbea7c80b8a590 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Wed, 2 Sep 2020 21:18:26 +0200 Subject: [PATCH 1058/1255] better error reporting --- .../semanticweb/rulewerk/commands/Interpreter.java | 12 ++++++++---- .../commands/LoadCommandInterpreterTest.java | 9 +++++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java index c14dc1c4c..1be04b71b 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -215,7 +215,7 @@ public static String extractStringArgument(final Command command, final int inde try { return Terms.extractString(command.getArguments().get(index).fromTerm() .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); - } catch (final IllegalArgumentException e) { + } catch (final IllegalArgumentException | IndexOutOfBoundsException e) { throw getArgumentTypeError(index, "string", parameterName); } } @@ -225,15 +225,19 @@ public static String extractNameArgument(final Command command, final int index, try { return Terms.extractName(command.getArguments().get(index).fromTerm() .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); - } catch (final IllegalArgumentException e) { + } catch (final IllegalArgumentException | IndexOutOfBoundsException e) { throw getArgumentTypeError(index, "constant", parameterName); } } public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int 
index, final String parameterName) throws CommandExecutionException { - return command.getArguments().get(index).fromPositiveLiteral() - .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); + try { + return command.getArguments().get(index).fromPositiveLiteral() + .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); + } catch (final IndexOutOfBoundsException e) { + throw getArgumentTypeError(index, "constant", parameterName); + } } /** diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java index ae4d121a2..ee3d1ac42 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -296,6 +296,15 @@ public void wrongArgumentCount_fails() throws ParsingException, CommandExecution Command command = interpreter.parseCommand("@load ."); interpreter.runCommand(command); } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithOptional_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load OWL ."); + interpreter.runCommand(command); + } @Test(expected = CommandExecutionException.class) public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandExecutionException { From a07d3b9bc07ba3d86b7bcd4c9676662394eab534 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 21:47:22 +0200 Subject: [PATCH 1059/1255] completers for @load, @export and @clear --- .../shell/DefaultShellConfiguration.java | 40 +++++++++---- .../commands/ClearCommandInterpreter.java | 24 ++++---- 
.../commands/ExportCommandInterpreter.java | 28 ++++----- .../commands/LoadCommandInterpreter.java | 60 +++++++++---------- 4 files changed, 86 insertions(+), 66 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index bc93de9c6..f893512f7 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -26,8 +26,8 @@ import java.util.List; import org.jline.builtins.Completers; -import org.jline.builtins.Completers.FileNameCompleter; import org.jline.builtins.Completers.TreeCompleter; +import org.jline.builtins.Completers.TreeCompleter.Node; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; import org.jline.reader.impl.completer.StringsCompleter; @@ -35,6 +35,9 @@ import org.jline.terminal.TerminalBuilder; import org.jline.utils.AttributedString; import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.ClearCommandInterpreter; +import org.semanticweb.rulewerk.commands.ExportCommandInterpreter; +import org.semanticweb.rulewerk.commands.LoadCommandInterpreter; /** * An implementation of {@link ShellConfiguration} with custom styling and @@ -69,21 +72,38 @@ LineReaderBuilder getDefaultLineReaderConfiguration(final LineReaderBuilder line } TreeCompleter buildCompleter(final Collection registeredCommands) { -// @load and @export commands require a file name as argument - final FileNameCompleter fileNameCompleter = new Completers.FileNameCompleter(); + final Node fileNameCompleterNode = TreeCompleter.node(new Completers.FileNameCompleter()); final List nodes = new ArrayList<>(); - registeredCommands.stream().map(commandName -> "@" + commandName).forEach(serializedCommandName -> { - if 
(serializedCommandName.equals("@load")) { - nodes.add(TreeCompleter.node(serializedCommandName, TreeCompleter.node(fileNameCompleter))); - } else if (serializedCommandName.equals("@help")) { - nodes.add(TreeCompleter.node(serializedCommandName, - TreeCompleter.node(new StringsCompleter(registeredCommands)))); + registeredCommands.stream().map(command -> "@" + command).forEach(commandName -> { + if (commandName.equals("@load")) { + nodes.add(TreeCompleter.node(commandName, fileNameCompleterNode)); + + final StringsCompleter taskOptionsCompleter = new StringsCompleter(LoadCommandInterpreter.TASK_OWL, + LoadCommandInterpreter.TASK_RDF, LoadCommandInterpreter.TASK_RLS); + nodes.add(TreeCompleter.node(commandName, + TreeCompleter.node(taskOptionsCompleter, fileNameCompleterNode))); + } else if (commandName.equals("@export")) { + final StringsCompleter taskOptionsCompleter = new StringsCompleter( + ExportCommandInterpreter.TASK_INFERENCES, ExportCommandInterpreter.TASK_KB + ); + nodes.add(TreeCompleter.node(commandName, + TreeCompleter.node(taskOptionsCompleter, fileNameCompleterNode))); + } else if (commandName.equals("@clear")) { + final StringsCompleter taskOptionsCompleter = new StringsCompleter(ClearCommandInterpreter.TASK_ALL, + ClearCommandInterpreter.TASK_INFERENCES, ClearCommandInterpreter.TASK_FACTS, + ClearCommandInterpreter.TASK_PREFIXES, ClearCommandInterpreter.TASK_RULES, + ClearCommandInterpreter.TASK_SOURCES); + nodes.add(TreeCompleter.node(commandName, TreeCompleter.node(taskOptionsCompleter))); + } else if (commandName.equals("@help")) { + nodes.add( + TreeCompleter.node(commandName, TreeCompleter.node(new StringsCompleter(registeredCommands)))); } else { - nodes.add(TreeCompleter.node(serializedCommandName)); + nodes.add(TreeCompleter.node(commandName)); } }); return new TreeCompleter(nodes); + } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java 
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index b27bda588..3c70b9744 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -27,17 +27,17 @@ public class ClearCommandInterpreter implements CommandInterpreter { - static final String TASK_ALL = "ALL"; - static final String TASK_INFERENCES = "INF"; - static final String TASK_FACTS = "FACTS"; - static final String TASK_RULES = "RULES"; - static final String TASK_SOURCES = "DATASOURCES"; - static final String TASK_PREFIXES = "PREFIXES"; + public static final String TASK_ALL = "ALL"; + public static final String TASK_INFERENCES = "INF"; + public static final String TASK_FACTS = "FACTS"; + public static final String TASK_RULES = "RULES"; + public static final String TASK_SOURCES = "DATASOURCES"; + public static final String TASK_PREFIXES = "PREFIXES"; @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 1); - String task = Interpreter.extractNameArgument(command, 0, "task"); + final String task = Interpreter.extractNameArgument(command, 0, "task"); if (TASK_ALL.equals(task)) { interpreter.clearReasonerAndKnowledgeBase(); interpreter.printNormal("Knowledge base has been cleared; reasoner has been completely reset.\n"); @@ -45,17 +45,17 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio interpreter.getReasoner().resetReasoner(); interpreter.printNormal("Reasoner has been reset.\n"); } else if (TASK_FACTS.equals(task)) { - for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + for (final Fact fact : interpreter.getKnowledgeBase().getFacts()) { 
interpreter.getKnowledgeBase().removeStatement(fact); } interpreter.printNormal("All facts have been removed from the knowledge base.\n"); } else if (TASK_RULES.equals(task)) { - for (Rule rule : interpreter.getKnowledgeBase().getRules()) { + for (final Rule rule : interpreter.getKnowledgeBase().getRules()) { interpreter.getKnowledgeBase().removeStatement(rule); } interpreter.printNormal("All rules have been removed from the knowledge base.\n"); } else if (TASK_SOURCES.equals(task)) { - for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + for (final DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() .getDataSourceDeclarations()) { interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); } @@ -71,7 +71,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " TASK\n" // + " TASK: what to reset, possuble values:\n" // + " ALL: empty knowledge base and completely reset reasoner\n" // diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index 4ead3798d..3cffcf25f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -29,20 +29,20 @@ public class ExportCommandInterpreter implements CommandInterpreter { - static final String TASK_KB = "KB"; - static final String TASK_INFERENCES = "INFERENCES"; + public static final String TASK_KB = "KB"; + public static final String TASK_INFERENCES = "INFERENCES"; @Override - public void run(Command command, 
Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 2); - String task = Interpreter.extractNameArgument(command, 0, "task"); - String fileName = Interpreter.extractStringArgument(command, 1, "filename"); + final String task = Interpreter.extractNameArgument(command, 0, "task"); + final String fileName = Interpreter.extractStringArgument(command, 1, "filename"); if (TASK_KB.equals(task)) { - exportKb(interpreter, fileName); + this.exportKb(interpreter, fileName); } else if (TASK_INFERENCES.equals(task)) { - exportInferences(interpreter, fileName); + this.exportInferences(interpreter, fileName); } else { throw new CommandExecutionException( "Unknown task " + task + ". Should be " + TASK_KB + " or " + TASK_INFERENCES); @@ -51,7 +51,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " TASK \"filename\" .\n" // + " TASK: what to export; can be KB or INFERENCES\n" // + " \"filename\": string path export file (suggested extension: .rls)\n"); @@ -62,14 +62,14 @@ public String getSynopsis() { return "export knowledgebase or inferences to a Rulewerk file"; } - private void exportInferences(Interpreter interpreter, String fileName) throws CommandExecutionException { - Timer timer = new Timer("export"); + private void exportInferences(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + final Timer timer = new Timer("export"); Correctness correctness; try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); correctness = interpreter.getReasoner().writeInferences(writer); timer.stop(); - } catch (IOException e) { + } 
catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } @@ -78,13 +78,13 @@ private void exportInferences(Interpreter interpreter, String fileName) throws C interpreter.printNormal(" This result is " + correctness + ".\n"); } - private void exportKb(Interpreter interpreter, String fileName) throws CommandExecutionException { - Timer timer = new Timer("export"); + private void exportKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + final Timer timer = new Timer("export"); try (Writer writer = interpreter.getFileWriter(fileName)) { timer.start(); interpreter.getKnowledgeBase().writeKnowledgeBase(writer); timer.stop(); - } catch (IOException e) { + } catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 952f3060c..1fe89e2e3 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -57,14 +57,14 @@ */ public class LoadCommandInterpreter implements CommandInterpreter { - static final String TASK_RLS = "RULES"; - static final String TASK_OWL = "OWL"; - static final String TASK_RDF = "RDF"; + public static final String TASK_RLS = "RULES"; + public static final String TASK_OWL = "OWL"; + public static final String TASK_RDF = "RDF"; static final String PREDICATE_ABOX = "ABOX"; @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { String task; int pos = 0; if 
(command.getArguments().size() > 0 && command.getArguments().get(0).fromTerm().isPresent() @@ -75,7 +75,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio task = TASK_RLS; } - String fileName = Interpreter.extractStringArgument(command, pos, "filename"); + final String fileName = Interpreter.extractStringArgument(command, pos, "filename"); pos++; String rdfTriplePredicate = RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; @@ -94,16 +94,16 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio Interpreter.validateArgumentCount(command, pos); - int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); - int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); - int countDataSourceDeclarationsBefore = interpreter.getKnowledgeBase().getDataSourceDeclarations().size(); + final int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); + final int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); + final int countDataSourceDeclarationsBefore = interpreter.getKnowledgeBase().getDataSourceDeclarations().size(); if (TASK_RLS.equals(task)) { - loadKb(interpreter, fileName); + this.loadKb(interpreter, fileName); } else if (TASK_OWL.equals(task)) { - loadOwl(interpreter, fileName); + this.loadOwl(interpreter, fileName); } else if (TASK_RDF.equals(task)) { - loadRdf(interpreter, fileName, rdfTriplePredicate); + this.loadRdf(interpreter, fileName, rdfTriplePredicate); } else { throw new CommandExecutionException( "Unknown task " + task + ". 
Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); @@ -117,23 +117,23 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } - private void loadKb(Interpreter interpreter, String fileName) throws CommandExecutionException { + private void loadKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { try { - InputStream inputStream = interpreter.getFileInputStream(fileName); + final InputStream inputStream = interpreter.getFileInputStream(fileName); RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); - } catch (FileNotFoundException e) { + } catch (final FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); - } catch (ParsingException e) { + } catch (final ParsingException e) { throw new CommandExecutionException("Failed to parse Rulewerk file: " + e.getMessage(), e); } } - private void loadOwl(Interpreter interpreter, String fileName) throws CommandExecutionException { + private void loadOwl(final Interpreter interpreter, final String fileName) throws CommandExecutionException { final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); OWLOntology ontology; try { ontology = ontologyManager.loadOntologyFromOntologyDocument(new File(fileName)); - } catch (OWLOntologyCreationException e) { + } catch (final OWLOntologyCreationException e) { throw new CommandExecutionException("Problem loading OWL ontology: " + e.getMessage(), e); } interpreter.printNormal( @@ -157,20 +157,20 @@ private void loadOwl(Interpreter interpreter, String fileName) throws CommandExe interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); } - private void loadRdf(Interpreter interpreter, String fileName, String triplePredicateName) + private void loadRdf(final Interpreter interpreter, final String fileName, final String triplePredicateName) throws CommandExecutionException { try { - String baseIri = new 
File(fileName).toURI().toString(); + final String baseIri = new File(fileName).toURI().toString(); - Iterator formatsToTry = Arrays.asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML) - .iterator(); + final Iterator formatsToTry = Arrays + .asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML).iterator(); Model model = null; - List parseErrors = new ArrayList<>(); + final List parseErrors = new ArrayList<>(); while (model == null && formatsToTry.hasNext()) { - RDFFormat rdfFormat = formatsToTry.next(); + final RDFFormat rdfFormat = formatsToTry.next(); try { - InputStream inputStream = interpreter.getFileInputStream(fileName); - model = parseRdfFromStream(inputStream, rdfFormat, baseIri); + final InputStream inputStream = interpreter.getFileInputStream(fileName); + model = this.parseRdfFromStream(inputStream, rdfFormat, baseIri); interpreter.printNormal("Found RDF document in format " + rdfFormat.getName() + " ...\n"); } catch (RDFParseException | RDFHandlerException e) { parseErrors.add("Failed to parse as " + rdfFormat.getName() + ": " + e.getMessage()); @@ -178,20 +178,20 @@ private void loadRdf(Interpreter interpreter, String fileName, String triplePred } if (model == null) { String message = "Failed to parse RDF input:"; - for (String error : parseErrors) { + for (final String error : parseErrors) { message += "\n " + error; } throw new CommandExecutionException(message); } - RdfModelConverter rdfModelConverter = new RdfModelConverter(true, triplePredicateName); + final RdfModelConverter rdfModelConverter = new RdfModelConverter(true, triplePredicateName); rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); - } catch (IOException e) { + } catch (final IOException e) { throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); } } - private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, String baseIri) + private Model parseRdfFromStream(final InputStream inputStream, final 
RDFFormat rdfFormat, final String baseIri) throws RDFParseException, RDFHandlerException, IOException { final Model model = new LinkedHashModel(); final RDFParser rdfParser = Rio.createParser(rdfFormat); @@ -201,7 +201,7 @@ private Model parseRdfFromStream(InputStream inputStream, RDFFormat rdfFormat, S } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate]\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // From 43f4826d899cd6572f2509a71e6f2bb945a16d94 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 22:02:49 +0200 Subject: [PATCH 1060/1255] correct usage message --- .../commands/ExitCommandInterpreter.java | 2 +- .../commands/AddSourceCommandInterpreter.java | 188 ++++++------ .../commands/ClearCommandInterpreter.java | 2 +- .../commands/LoadCommandInterpreter.java | 2 +- .../RemoveSourceCommandInterpreter.java | 16 +- .../commands/RetractCommandInterpreter.java | 150 ++++----- .../commands/SetPrefixCommandInterpreter.java | 12 +- .../RetractCommandInterpreterTest.java | 288 +++++++++--------- 8 files changed, 330 insertions(+), 330 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index b6506b81b..f9214afae 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -61,7 +61,7 @@ public ExitCommandInterpreter(final Shell shell) { @Override public void printHelp(final String commandName, final Interpreter interpreter) { - 
interpreter.printNormal("Usage: " + commandName + ".\n"); + interpreter.printNormal("Usage: @" + commandName + ".\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java index bf7e2aad8..004023a65 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -1,94 +1,94 @@ -package org.semanticweb.rulewerk.commands; - -/*- - * #%L - * Rulewerk command execution support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.DataSource; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class AddSourceCommandInterpreter implements CommandInterpreter { - - @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - Interpreter.validateArgumentCount(command, 2); - String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); - PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, - "source declaration"); - - Predicate predicate = extractPredicate(predicateDeclaration); - DataSource dataSource = extractDataSource(sourceDeclaration, interpreter); - - if (dataSource.getRequiredArity().isPresent()) { - Integer requiredArity = dataSource.getRequiredArity().get(); - if (predicate.getArity() != requiredArity) { - throw new CommandExecutionException("Invalid arity " + predicate.getArity() + " for data source, " - + "expected " + requiredArity + "."); - } - } - - interpreter.getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); - } - - @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " []: .\n" - + " [] : the name of the predicate and its arity\n" - + " : a fact specifying a source declaration\n\n" - + "Note that every predicate can have multiple sources.\n"); - } - - @Override - public String getSynopsis() { - return "define a new external data source for a predicate"; - } - - static Predicate extractPredicate(String predicateDeclaration) throws 
CommandExecutionException { - String predicateName; - int arity; - try { - int openBracket = predicateDeclaration.indexOf('['); - int closeBracket = predicateDeclaration.indexOf(']'); - predicateName = predicateDeclaration.substring(0, openBracket); - String arityString = predicateDeclaration.substring(openBracket + 1, closeBracket); - arity = Integer.parseInt(arityString); - } catch (IndexOutOfBoundsException | NumberFormatException e) { - throw new CommandExecutionException( - "Predicate declaration must have the format \"predicateName[number]\" but was \"" - + predicateDeclaration + "\"."); - } - return Expressions.makePredicate(predicateName, arity); - } - - static DataSource extractDataSource(PositiveLiteral sourceDeclaration, Interpreter interpreter) - throws CommandExecutionException { - try { - return interpreter.getParserConfiguration() - .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); - } catch (ParsingException e) { - throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage()); - } - } - -} +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class AddSourceCommandInterpreter implements CommandInterpreter { + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + final String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + final PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + "source declaration"); + + final Predicate predicate = extractPredicate(predicateDeclaration); + final DataSource dataSource = extractDataSource(sourceDeclaration, interpreter); + + if (dataSource.getRequiredArity().isPresent()) { + final Integer requiredArity = dataSource.getRequiredArity().get(); + if (predicate.getArity() != requiredArity) { + throw new CommandExecutionException("Invalid arity " + predicate.getArity() + " for data source, " + + "expected " + requiredArity + "."); + } + } + + interpreter.getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " : a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources.\n"); + } + + @Override + public String getSynopsis() { + return "define a new external data source for a predicate"; + } + + static Predicate 
extractPredicate(final String predicateDeclaration) throws CommandExecutionException { + String predicateName; + int arity; + try { + final int openBracket = predicateDeclaration.indexOf('['); + final int closeBracket = predicateDeclaration.indexOf(']'); + predicateName = predicateDeclaration.substring(0, openBracket); + final String arityString = predicateDeclaration.substring(openBracket + 1, closeBracket); + arity = Integer.parseInt(arityString); + } catch (IndexOutOfBoundsException | NumberFormatException e) { + throw new CommandExecutionException( + "Predicate declaration must have the format \"predicateName[number]\" but was \"" + + predicateDeclaration + "\"."); + } + return Expressions.makePredicate(predicateName, arity); + } + + static DataSource extractDataSource(final PositiveLiteral sourceDeclaration, final Interpreter interpreter) + throws CommandExecutionException { + try { + return interpreter.getParserConfiguration() + .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); + } catch (final ParsingException e) { + throw new CommandExecutionException("Could not parse source declaration: " + e.getMessage()); + } + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java index 3c70b9744..484d90c41 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -72,7 +72,7 @@ public void run(final Command command, final Interpreter interpreter) throws Com @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " TASK\n" // + interpreter.printNormal("Usage: @" + commandName + " TASK .\n" // + " TASK: what to reset, possuble values:\n" // + " ALL: empty 
knowledge base and completely reset reasoner\n" // + " INF: reset reasoner to clear all loaded data and inferences\n" // diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index 1fe89e2e3..c0a223524 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -202,7 +202,7 @@ private Model parseRdfFromStream(final InputStream inputStream, final RDFFormat @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate]\n" // + interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate] .\n" // + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + " RULES to load a knowledge base in Rulewerk rls format\n" // + " OWL to load an OWL ontology and convert it to facts and rules\n" // diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java index 9a7c606a4..c835bd635 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -30,22 +30,22 @@ public class RemoveSourceCommandInterpreter implements CommandInterpreter { @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { if (command.getArguments().size() == 0 || command.getArguments().size() > 2) { throw new 
CommandExecutionException("This command requires one or two arguments."); } - String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); - Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + final String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + final Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); DataSource dataSource = null; if (command.getArguments().size() == 2) { - PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + final PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, "source declaration"); dataSource = AddSourceCommandInterpreter.extractDataSource(sourceDeclaration, interpreter); } if (dataSource != null) { - DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { interpreter.printNormal("Removed specified data source declaration.\n"); } else { @@ -53,7 +53,7 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } } else { int count = 0; - for (DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + for (final DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() .getDataSourceDeclarations()) { if (dataSourceDeclaration.getPredicate().equals(predicate)) { interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); @@ -66,8 +66,8 @@ public void run(Command command, Interpreter interpreter) throws CommandExecutio } @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " []: .\n" + public void printHelp(final 
String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + " [] : the name of the predicate and its arity\n" + " (optional): a fact specifying a source declaration\n\n" + "Note that every predicate can have multiple sources.\n"); diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java index 2e74580c8..6add109ca 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -1,75 +1,75 @@ -package org.semanticweb.rulewerk.commands; - -/*- - * #%L - * Rulewerk command execution support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.Argument; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; - -public class RetractCommandInterpreter implements CommandInterpreter { - - @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - int factCount = 0; - int ruleCount = 0; - for (Argument argument : command.getArguments()) { - if (argument.fromPositiveLiteral().isPresent()) { - PositiveLiteral literal = argument.fromPositiveLiteral().get(); - Fact fact; - try { - fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); - } catch (IllegalArgumentException e) { - throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); - } - factCount += interpreter.getKnowledgeBase().removeStatement(fact); - } else if (argument.fromRule().isPresent()) { - ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); - } else { // implies argument.fromTerm().isPresent() - String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); - Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); - for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { - if (predicate.equals(fact.getPredicate())) { - factCount += interpreter.getKnowledgeBase().removeStatement(fact); - } - } - } - } - - interpreter.printNormal("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); - } - - @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" - + " fact or rule: statement(s) to be removed from the knowledge base, or a 
predicate declaration\n" - + " of the form name[arity] to remove all facts for that predicate.\n" - + "Reasoning needs to be invoked after finishing the removal of statements.\n"); - } - - @Override - public String getSynopsis() { - return "remove facts and rules to the knowledge base"; - } - -} +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class RetractCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } else if (argument.fromRule().isPresent()) { + ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); + } else { // implies argument.fromTerm().isPresent() + String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + if (predicate.equals(fact.getPredicate())) { + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } + } + } + } + + interpreter.printNormal("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " ()+ .\n" + + " fact or rule: statement(s) to be removed from the knowledge base, or a 
predicate declaration\n" + + " of the form name[arity] to remove all facts for that predicate.\n" + + "Reasoning needs to be invoked after finishing the removal of statements.\n"); + } + + @Override + public String getSynopsis() { + return "remove facts and rules to the knowledge base"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java index c713f7789..583621c68 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -26,22 +26,22 @@ public class SetPrefixCommandInterpreter implements CommandInterpreter { @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { Interpreter.validateArgumentCount(command, 2); - String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); - String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); + final String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); + final String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); try { interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, prefixIri); - } catch (PrefixDeclarationException e) { // practically impossible + } catch (final PrefixDeclarationException e) { // practically impossible throw new CommandExecutionException("Setting prefix failed: " + e.getMessage()); } } @Override - public void printHelp(String commandName, Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName 
+ " : .\n"); + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " : .\n"); } @Override diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java index f9a8189cc..cc693eb7b 100644 --- a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java @@ -1,144 +1,144 @@ -package org.semanticweb.rulewerk.commands; - -/*- - * #%L - * Rulewerk command execution support - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.*; - -import java.io.StringWriter; -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Command; -import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.parser.ParsingException; - -public class RetractCommandInterpreterTest { - - @Test - public void correctUse_succeeds() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Term a = Expressions.makeAbstractConstant("a"); - Term x = Expressions.makeUniversalVariable("X"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Predicate r = Expressions.makePredicate("r", 1); - Fact fact = Expressions.makeFact(p, a); - Fact fact2 = Expressions.makeFact(q, a); - PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); - PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); - Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); - interpreter.getKnowledgeBase().addStatement(fact); - interpreter.getKnowledgeBase().addStatement(fact2); - interpreter.getKnowledgeBase().addStatement(rule); - - Command command = interpreter.parseCommand("@retract p(a) q(?X) :- r(?X) ."); - interpreter.runCommand(command); - List facts = interpreter.getKnowledgeBase().getFacts(); - List rules = interpreter.getKnowledgeBase().getRules(); - List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); - - 
assertEquals("retract", command.getName()); - assertEquals(2, command.getArguments().size()); - assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); - assertTrue(command.getArguments().get(1).fromRule().isPresent()); - - assertEquals(Arrays.asList(fact2), facts); - assertTrue(rules.isEmpty()); - assertTrue(dataSourceDeclarations.isEmpty()); - } - - @Test - public void correctUse_retractPredicate_succeeds() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - Term a = Expressions.makeAbstractConstant("a"); - Term b = Expressions.makeAbstractConstant("b"); - Predicate p = Expressions.makePredicate("p", 1); - Predicate q = Expressions.makePredicate("q", 1); - Fact pa = Expressions.makeFact(p, a); - Fact pb = Expressions.makeFact(p, b); - Fact qa = Expressions.makeFact(q, a); - - interpreter.getKnowledgeBase().addStatement(pa); - interpreter.getKnowledgeBase().addStatement(pb); - interpreter.getKnowledgeBase().addStatement(qa); - - Command command = interpreter.parseCommand("@retract p[1] ."); - interpreter.runCommand(command); - List facts = interpreter.getKnowledgeBase().getFacts(); - List rules = interpreter.getKnowledgeBase().getRules(); - List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); - - assertEquals(Arrays.asList(qa), facts); - assertTrue(rules.isEmpty()); - assertTrue(dataSourceDeclarations.isEmpty()); - } - - @Test(expected = CommandExecutionException.class) - public void wrongArgumentTermNumber_fails() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@retract 42 ."); - interpreter.runCommand(command); - } - - @Test(expected = CommandExecutionException.class) - public void 
wrongArgumentTermStringNoPredicate_fails() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@retract \"string\" ."); - interpreter.runCommand(command); - } - - @Test(expected = CommandExecutionException.class) - public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - - Command command = interpreter.parseCommand("@retract p(?X) ."); - interpreter.runCommand(command); - } - - @Test - public void help_succeeds() throws ParsingException, CommandExecutionException { - StringWriter writer = new StringWriter(); - Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); - CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); - InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); - } - - @Test - public void synopsis_succeeds() throws ParsingException, CommandExecutionException { - CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); - InterpreterTest.checkSynopsisFormat(commandInterpreter); - } - -} +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RetractCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term x = Expressions.makeUniversalVariable("X"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Predicate r = Expressions.makePredicate("r", 1); + Fact fact = Expressions.makeFact(p, a); + Fact fact2 = Expressions.makeFact(q, a); + PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x); + PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x); + Rule rule = Expressions.makeRule(headLiteral, bodyLiteral); + interpreter.getKnowledgeBase().addStatement(fact); + interpreter.getKnowledgeBase().addStatement(fact2); + interpreter.getKnowledgeBase().addStatement(rule); + + Command command = interpreter.parseCommand("@retract p(a) q(?X) :- r(?X) ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = 
interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals("retract", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(1).fromRule().isPresent()); + + assertEquals(Arrays.asList(fact2), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test + public void correctUse_retractPredicate_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Term a = Expressions.makeAbstractConstant("a"); + Term b = Expressions.makeAbstractConstant("b"); + Predicate p = Expressions.makePredicate("p", 1); + Predicate q = Expressions.makePredicate("q", 1); + Fact pa = Expressions.makeFact(p, a); + Fact pb = Expressions.makeFact(p, b); + Fact qa = Expressions.makeFact(q, a); + + interpreter.getKnowledgeBase().addStatement(pa); + interpreter.getKnowledgeBase().addStatement(pb); + interpreter.getKnowledgeBase().addStatement(qa); + + Command command = interpreter.parseCommand("@retract p[1] ."); + interpreter.runCommand(command); + List facts = interpreter.getKnowledgeBase().getFacts(); + List rules = interpreter.getKnowledgeBase().getRules(); + List dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations(); + + assertEquals(Arrays.asList(qa), facts); + assertTrue(rules.isEmpty()); + assertTrue(dataSourceDeclarations.isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTermNumber_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract 42 ."); + 
interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentTermStringNoPredicate_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@retract p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new RetractCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} From 4c696f370d1b229804a5513f59499794b7e7fb21 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Wed, 2 Sep 2020 22:06:11 +0200 Subject: [PATCH 1061/1255] correct unit test exit command help message --- .../client/shell/commands/ExitCommandInterpreter.java | 2 +- .../client/shell/commands/ExitCommandInterpreterTest.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java 
b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java index f9214afae..ceb284828 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -61,7 +61,7 @@ public ExitCommandInterpreter(final Shell shell) { @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + ".\n"); + interpreter.printNormal("Usage: @" + commandName + " .\n"); } @Override diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java index 33da1aa4c..2a2fd0baf 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -61,10 +61,10 @@ public void help_succeeds() throws ParsingException, CommandExecutionException { final Interpreter interpreterSpy = Mockito.spy(interpreter); commandInterpreter.printHelp("commandname", interpreterSpy); - Mockito.verify(interpreterSpy).printNormal("Usage: commandname.\n"); + Mockito.verify(interpreterSpy).printNormal("Usage: @commandname .\n"); final String result = writer.toString(); - assertEquals("Usage: commandname.\n", result); + assertEquals("Usage: @commandname .\n", result); } @Test From 90bc16c5ef7bb4a606c72dd0541dc633f1b2d519 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 15:42:41 +0200 Subject: [PATCH 1062/1255] correct help messages for @query and @export commands --- .../commands/ExportCommandInterpreter.java | 4 +- .../commands/QueryCommandInterpreter.java | 72 
+++++++++---------- 2 files changed, 38 insertions(+), 38 deletions(-) diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java index 3cffcf25f..30455240f 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -52,9 +52,9 @@ public void run(final Command command, final Interpreter interpreter) throws Com @Override public void printHelp(final String commandName, final Interpreter interpreter) { - interpreter.printNormal("Usage: @" + commandName + " TASK \"filename\" .\n" // + interpreter.printNormal("Usage: @" + commandName + " TASK \"file\" .\n" // + " TASK: what to export; can be KB or INFERENCES\n" // - + " \"filename\": string path export file (suggested extension: .rls)\n"); + + " \"file\": path to export file (suggested extension: .rls), enclosed in quotes\n"); } @Override diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java index 310505656..35e53a67a 100644 --- a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -48,25 +48,25 @@ public class QueryCommandInterpreter implements CommandInterpreter { private String csvFile; @Override - public void run(Command command, Interpreter interpreter) throws CommandExecutionException { - processArguments(command.getArguments()); + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + this.processArguments(command.getArguments()); - if (doCount) { - 
printCountQueryResults(interpreter); - } else if (csvFile == null) { - printQueryResults(interpreter); + if (this.doCount) { + this.printCountQueryResults(interpreter); + } else if (this.csvFile == null) { + this.printQueryResults(interpreter); } else { - exportQueryResults(interpreter); + this.exportQueryResults(interpreter); } } @Override - public void printHelp(String commandName, Interpreter interpreter) { + public void printHelp(final String commandName, final Interpreter interpreter) { interpreter.printNormal( - "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV ] .\n" + "Usage: @" + commandName + " [COUNT] [LIMIT ] [EXPORTCSV <\"file\">] .\n" + " query literal: positive literal, possibly with ?queryVariables\n" + " limit: maximal number of results to be shown\n" - + " filename: string path to CSV file for exporting query results\n"); + + " \"file\": path to CSV file for exporting query results, enclosed in quotes\n"); } @Override @@ -74,19 +74,19 @@ public String getSynopsis() { return "print or export query results"; } - private void processArguments(List arguments) throws CommandExecutionException { + private void processArguments(final List arguments) throws CommandExecutionException { int pos = 0; - limit = -1; - doCount = false; - csvFile = null; + this.limit = -1; + this.doCount = false; + this.csvFile = null; if (arguments.size() > 0 && KEYWORD_COUNT.equals(arguments.get(0).fromTerm().orElse(null))) { - doCount = true; + this.doCount = true; pos++; } if (arguments.size() > pos && arguments.get(pos).fromPositiveLiteral().isPresent()) { - queryLiteral = arguments.get(pos).fromPositiveLiteral().get(); + this.queryLiteral = arguments.get(pos).fromPositiveLiteral().get(); pos++; } else { throw new CommandExecutionException("A query literal must be given."); @@ -96,18 +96,18 @@ private void processArguments(List arguments) throws CommandExecutionE if (arguments.size() > pos + 1 && KEYWORD_LIMIT.equals(arguments.get(pos).fromTerm().orElse(null)) && 
arguments.get(pos + 1).fromTerm().isPresent()) { try { - limit = Terms.extractInt(arguments.get(pos + 1).fromTerm().get()); + this.limit = Terms.extractInt(arguments.get(pos + 1).fromTerm().get()); pos += 2; - } catch (IllegalArgumentException e) { + } catch (final IllegalArgumentException e) { throw new CommandExecutionException( "Invalid limit given: " + arguments.get(pos + 1).fromTerm().get()); } } else if (arguments.size() > pos + 1 && KEYWORD_TOFILE.equals(arguments.get(pos).fromTerm().orElse(null)) && arguments.get(pos + 1).fromTerm().isPresent()) { try { - csvFile = Terms.extractString(arguments.get(pos + 1).fromTerm().get()); + this.csvFile = Terms.extractString(arguments.get(pos + 1).fromTerm().get()); pos += 2; - } catch (IllegalArgumentException e) { + } catch (final IllegalArgumentException e) { throw new CommandExecutionException( "Invalid filename given: " + arguments.get(pos + 1).fromTerm().get()); } @@ -117,17 +117,17 @@ private void processArguments(List arguments) throws CommandExecutionE } } - private void printCountQueryResults(Interpreter interpreter) throws CommandExecutionException { - if (limit != -1) { + private void printCountQueryResults(final Interpreter interpreter) throws CommandExecutionException { + if (this.limit != -1) { throw new CommandExecutionException("LIMIT not supported with COUNT"); } - if (csvFile != null) { + if (this.csvFile != null) { throw new CommandExecutionException("COUNT results cannot be exported to CSV"); } - Timer timer = new Timer("query"); + final Timer timer = new Timer("query"); timer.start(); - QueryAnswerCount count = interpreter.getReasoner().countQueryAnswers(queryLiteral); + final QueryAnswerCount count = interpreter.getReasoner().countQueryAnswers(this.queryLiteral); timer.stop(); interpreter.printNormal(String.valueOf(count.getCount()) + "\n"); @@ -135,14 +135,14 @@ private void printCountQueryResults(Interpreter interpreter) throws CommandExecu interpreter.printNormal(" This result is " + 
count.getCorrectness() + ".\n"); } - private void printQueryResults(Interpreter interpreter) throws CommandExecutionException { - LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(queryLiteral, interpreter.getWriter(), + private void printQueryResults(final Interpreter interpreter) throws CommandExecutionException { + final LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(this.queryLiteral, interpreter.getWriter(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); - Timer timer = new Timer("query"); + final Timer timer = new Timer("query"); timer.start(); - try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(queryLiteral, true)) { - while (printer.getResultCount() != limit && answers.hasNext()) { + try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(this.queryLiteral, true)) { + while (printer.getResultCount() != this.limit && answers.hasNext()) { printer.write(answers.next()); } timer.stop(); @@ -155,22 +155,22 @@ private void printQueryResults(Interpreter interpreter) throws CommandExecutionE printer.getResultCount() + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms."); } interpreter.printNormal(" Results are " + answers.getCorrectness() + ".\n"); - } catch (IOException e) { + } catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } } - private void exportQueryResults(Interpreter interpreter) throws CommandExecutionException { - if (limit != -1) { + private void exportQueryResults(final Interpreter interpreter) throws CommandExecutionException { + if (this.limit != -1) { throw new CommandExecutionException("LIMIT not supported for CSV export"); } - Timer timer = new Timer("query"); + final Timer timer = new Timer("query"); timer.start(); Correctness correctness; try { - correctness = interpreter.getReasoner().exportQueryAnswersToCsv(queryLiteral, csvFile, true); - } catch (IOException e) { + correctness = 
interpreter.getReasoner().exportQueryAnswersToCsv(this.queryLiteral, this.csvFile, true); + } catch (final IOException e) { throw new CommandExecutionException(e.getMessage(), e); } timer.stop(); From 123531c2d1c4a4abc9f1d259a91a6387ff5f8870 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:21:24 +0200 Subject: [PATCH 1063/1255] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 5d183be1f..f8619445b 100644 --- a/README.md +++ b/README.md @@ -56,3 +56,4 @@ Development Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. +* To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 622e39dc95b09d14bd96fbffe9db11bcc7dcdf0f Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:23:03 +0200 Subject: [PATCH 1064/1255] remove unused dependency from rulewerk-client pom --- rulewerk-client/pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index f05da9f94..d8bcb3d67 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -68,11 +68,6 @@ jansi ${jansi.version}
    - From e19ac6a57742ed8ec7712db9dfeea6aef5123d8c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:25:50 +0200 Subject: [PATCH 1065/1255] update to new vlog-java release version 1.3.4 --- rulewerk-vlog/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 5e767200f..fa7921ee2 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -17,7 +17,7 @@ Bindings for the VLog reasoner backend. - 1.3.3 + 1.3.4 vlog-java From 440d3cdefb135fb65b8839273505eb0eced73eaa Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:34:47 +0200 Subject: [PATCH 1066/1255] update version to release version 0.7.7 --- README.md | 4 ++-- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 12 files changed, 13 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index f8619445b..221f3a1b0 100644 --- a/README.md +++ b/README.md @@ -9,13 +9,13 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.6.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.7.0. The easiest way of using the library is with Maven. 
Maven users must add the following dependency to the dependencies in their pom.xml file: ``` org.semanticweb.rulewerk rulewerk-core - 0.6.0 + 0.7.0 ``` diff --git a/coverage/pom.xml b/coverage/pom.xml index 76ca8882d..40e671d48 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 coverage diff --git a/pom.xml b/pom.xml index bd2ca0a0d..300fb51a4 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index d8bcb3d67..f7b3e3a7c 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-client diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 97509c6c6..599a526a5 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 34701907a..dc562d9ab 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 975a3b56d..739f580cb 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index e5621cfbb..34ef21c2d 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index d351dd2c3..2b50c042d 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ 
-6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index 75487af58..c88769034 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index fe97c337b..ffed1fc13 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index fa7921ee2..53fc07b13 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0-SNAPSHOT + 0.7.0 rulewerk-vlog From a5573c864a4f70868ea666b96911bdf1a119d54c Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 16:49:28 +0200 Subject: [PATCH 1067/1255] use forward slash in path auto-completion --- .../rulewerk/client/shell/DefaultShellConfiguration.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java index f893512f7..5a82c72e6 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -68,6 +68,8 @@ LineReaderBuilder getDefaultLineReaderConfiguration(final LineReaderBuilder line */ lineReaderBuilder.option(LineReader.Option.INSERT_TAB, false); lineReaderBuilder.option(LineReader.Option.AUTO_FRESH_LINE, true); + lineReaderBuilder.option(LineReader.Option.USE_FORWARD_SLASH, true); + return lineReaderBuilder; } From b9db5368d02648b53fb1b6c22a480c9db4ae71e8 Mon Sep 17 00:00:00 2001 From: 
Irina Dragoste Date: Thu, 3 Sep 2020 17:51:33 +0200 Subject: [PATCH 1068/1255] comment out building unreleased version of VLog --- .travis.yml | 80 ++++++++++++++++++++++++++--------------------------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/.travis.yml b/.travis.yml index cb87765d8..3259c75de 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,40 +1,40 @@ -language: java -os: linux -jobs: - include: - - os: linux - dist: bionic - jdk: openjdk11 - after_success: - - mvn clean test jacoco:report coveralls:report - - - os: linux - dist: xenial - addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-6 - - g++-6 - - libstdc++6 - env: CC=gcc-6 CXX=g++-6 - jdk: openjdk8 - - - os: osx - osx_image: xcode10.2 - allow_failures: - - dist: trusty - -## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -before_install: -# # explicitly avoid bash as travis screws with .bashrc, -# # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 - - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" - -install: mvn install $OPTIONS -DskipTests=true - -cache: - directories: - - ./local_builds - - $HOME/.m2 +language: java +os: linux +jobs: + include: + - os: linux + dist: bionic + jdk: openjdk11 + after_success: + - mvn clean test jacoco:report coveralls:report + + - os: linux + dist: xenial + addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - gcc-6 + - g++-6 + - libstdc++6 + env: CC=gcc-6 CXX=g++-6 + jdk: openjdk8 + + - os: osx + osx_image: xcode10.2 + allow_failures: + - dist: trusty + +## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar +before_install: +# # explicitly avoid bash as travis screws with .bashrc, +# # cf. 
https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 +# - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" + +install: mvn install $OPTIONS -DskipTests=true + +cache: + directories: + - ./local_builds + - $HOME/.m2 From fc16aaeb3abf7e988dd9088a5789b7f265f0ea6c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 18:20:29 +0200 Subject: [PATCH 1069/1255] Parser: treat imports and data sources relatively to current file --- .../commands/LoadCommandInterpreter.java | 13 ++-- .../implementation/TridentDataSource.java | 11 +++- .../rulewerk/parser/DirectiveHandler.java | 14 +++-- .../rulewerk/parser/ParserConfiguration.java | 53 ++++++++++++++-- .../CsvFileDataSourceDeclarationHandler.java | 9 +-- .../DataSourceDeclarationHandler.java | 63 ++++++++++++++++--- .../RdfFileDataSourceDeclarationHandler.java | 9 +-- ...eryResultDataSourceDeclarationHandler.java | 6 +- .../TridentDataSourceDeclarationHandler.java | 9 +-- .../ImportFileDirectiveHandler.java | 17 ++--- .../ImportFileRelativeDirectiveHandler.java | 7 ++- .../parser/javacc/SubParserFactory.java | 4 +- .../rulewerk/parser/DirectiveHandlerTest.java | 9 +-- .../parser/RuleParserDataSourceTest.java | 31 +++++---- .../rulewerk/parser/RuleParserTest.java | 45 ++++++++++++- .../src/test/resources/subdir/facts.rls | 4 ++ .../src/test/resources/subdir/parent.rls | 1 + .../src/test/resources/subdir/sibling.rls | 1 + 18 files changed, 236 insertions(+), 70 deletions(-) create mode 100644 rulewerk-parser/src/test/resources/subdir/facts.rls create mode 100644 rulewerk-parser/src/test/resources/subdir/parent.rls create mode 100644 rulewerk-parser/src/test/resources/subdir/sibling.rls diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java index c0a223524..c8a2950c6 100644 --- 
a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -45,13 +45,15 @@ import org.semanticweb.rulewerk.core.model.api.Command; import org.semanticweb.rulewerk.core.model.api.TermType; import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** * Interpreter for the load command. 
- * + * * @author Markus Kroetzsch * */ @@ -120,7 +122,10 @@ public void run(final Command command, final Interpreter interpreter) throws Com private void loadKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { try { final InputStream inputStream = interpreter.getFileInputStream(fileName); - RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream); + final File file = new File(fileName); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration() + .setImportBasePath(file.getParent()); + RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream, parserConfiguration); } catch (final FileNotFoundException e) { throw new CommandExecutionException(e.getMessage(), e); } catch (final ParsingException e) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 54cfba4a8..213c7df2f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -34,7 +34,7 @@ * storage utility. This is the recommended data source for large RDF * datasets in the VLog reasoner. Trident databases are generated from RDF input * files in a batch process using the Trident tool. 
- * + * * @author Markus Kroetzsch * */ @@ -63,6 +63,11 @@ public Fact getDeclarationFact() { Expressions.makeDatatypeConstant(filePath, PrefixDeclarationRegistry.XSD_STRING)); } + @Override + public String toString() { + return "[TridentDataSource [tridentFile=" + this.filePath + "]"; + } + @Override public void accept(DataSourceConfigurationVisitor visitor) throws IOException { visitor.visit(this); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 2a8de3aa9..15b5914e9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -97,16 +97,22 @@ public static String validateStringArgument(final Argument argument, final Strin * * @param argument the argument to validate * @param description a description of the argument, used in constructing the - * error message. + * error message + * @param importBasePath the path that relative file names are resolved against * - * @throws ParsingException when the given argument is not a valid file path. + * @throws ParsingException when the given argument is not a valid file path * - * @return the File corresponding to the contained file path. + * @return the File corresponding to the contained file path */ - public static File validateFilenameArgument(final Argument argument, final String description) + public static File validateFilenameArgument(final Argument argument, final String description, final String importBasePath) throws ParsingException { String fileName = DirectiveHandler.validateStringArgument(argument, description); File file = new File(fileName); + + if (!file.isAbsolute() || importBasePath.isEmpty()) { + file = new File(importBasePath + File.separator + fileName); + } + try { // we don't care about the actual path, just that there is one. 
file.toPath(); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 0bf66066d..15d05f3f3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -57,12 +57,12 @@ public class ParserConfiguration { /** * The registered data sources. */ - private final HashMap dataSources = new HashMap<>(); + private HashMap dataSources = new HashMap<>(); /** * The registered datatypes. */ - private final HashMap datatypes = new HashMap<>(); + private HashMap datatypes = new HashMap<>(); /** * The registered configurable literals. @@ -74,6 +74,28 @@ public class ParserConfiguration { */ private HashMap> directives = new HashMap<>(); + /** + * The current base path to resolve imports against. Defaults to the current + * working directory. + */ + private String importBasePath = System.getProperty("user.dir"); + + public ParserConfiguration() { + } + + /** + * Copy constructor. 
+ * + * @param other {@link ParserConfiguration} to copy + */ + public ParserConfiguration(ParserConfiguration other) { + this.allowNamedNulls = other.allowNamedNulls; + this.dataSources = new HashMap<>(other.dataSources); + this.literals = new HashMap<>(other.literals); + this.directives = new HashMap<>(other.directives); + this.importBasePath = new String(other.importBasePath); + } + /** * Register a new (type of) Data Source. * @@ -121,7 +143,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLit throw new ParsingException("Data source \"" + declaration.getPredicate().getName() + "\" is not known."); } - return handler.handleDataSourceDeclaration(declaration.getArguments()); + return handler.handleDataSourceDeclaration(declaration.getArguments(), this.importBasePath); } /** @@ -308,4 +330,25 @@ public ParserConfiguration disallowNamedNulls() { public boolean isParsingOfNamedNullsAllowed() { return this.allowNamedNulls; } + + /** + * Get the base path for file imports. + * + * @return the path that relative imports will be resolved against. + */ + public String getImportBasePath() { + return this.importBasePath; + } + + /** + * Set a new base path for file imports. + * + * @param importBasePath path that relative imports will be resolved against. 
+ */ + public ParserConfiguration setImportBasePath(String importBasePath) { + this.importBasePath = importBasePath; + + return this; + } + } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 185f073ca..ceac5b156 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,9 +35,10 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); - String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "CSV file name"); + String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "CSV file name", + importBasePath); try { return new CsvFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java index 88801c331..1aa6a824c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java @@ -1,5 +1,6 @@ package org.semanticweb.rulewerk.parser.datasources; +import java.io.File; import java.net.URL; import java.util.List; @@ -12,9 +13,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -36,7 +37,19 @@ @FunctionalInterface public interface DataSourceDeclarationHandler { - DataSource handleDataSourceDeclaration(List terms) throws ParsingException; + /** + * Handle a data source declaration. 
+ * + * @param terms the list of arguments given in the declaration + * @param importBasePath the base path that relative imports will be resolved + * against + * + * @throws ParsingException when the arguments are unsuitable for the data + * source. + * + * @return a DataSource instance. + */ + DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException; /** * Validate the provided number of arguments to the source declaration. @@ -56,7 +69,7 @@ public static void validateNumberOfArguments(final List terms, final int n /** * Returns the string content of the given term, or reports an error if the term * is not an xsd:string. - * + * * @param term the term to be processed * @param parameterName the string name of the parameter to be used in error * messages @@ -67,14 +80,14 @@ public static String validateStringArgument(Term term, String parameterName) thr try { return Terms.extractString(term); } catch (IllegalArgumentException e) { - throw makeParameterParsingException(term, parameterName, e); + throw makeParameterParsingException(term, parameterName, "String", e); } } /** * Returns the URL represented by the given term, or reports an error if no * valid URL could be extracted from the term. - * + * * @param term the term to be processed * @param parameterName the string name of the parameter to be used in error * messages @@ -85,12 +98,42 @@ public static URL validateUrlArgument(Term term, String parameterName) throws Pa try { return Terms.extractUrl(term); } catch (IllegalArgumentException e) { - throw makeParameterParsingException(term, parameterName, e); + throw makeParameterParsingException(term, parameterName, "URL", e); + } + } + + /** + * Returns the File name represented by the given term, or reports an error if + * no valid File name could be extracted from the term. 
+ * + * @param term the term to be processed + * @param parameterName the string name of the parameter to be used in error + * messages + * @param importBasePath the base path that relative paths will be resolved + * against + * + * @throws ParsingException when the term was not a valid file path + * @return the extracted file path + */ + public static String validateFileNameArgument(Term term, String parameterName, String importBasePath) + throws ParsingException { + File file; + + try { + file = new File(Terms.extractString(term)); + } catch (IllegalArgumentException e) { + throw makeParameterParsingException(term, parameterName, "File name", e); + } + + if (file.isAbsolute() || importBasePath.isEmpty()) { + return file.getPath(); } + return importBasePath + File.separator + file.getPath(); } - static ParsingException makeParameterParsingException(Term term, String parameterName, Throwable cause) { - return new ParsingException("Expected " + parameterName + " to be a string. Found " + term.toString() + ".", - cause); + static ParsingException makeParameterParsingException(Term term, String parameterName, String type, + Throwable cause) { + return new ParsingException( + "Expected " + parameterName + " to be a " + type + ". Found " + term.toString() + ".", cause); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 36a3738c4..99b85132c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -35,9 +35,10 @@ */ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); - String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "RDF file name"); + String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "RDF file name", + importBasePath); try { return new RdfFileDataSource(fileName); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 3a405d83c..5faca3ad9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,7 +35,7 @@ */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 3); URL endpoint = DataSourceDeclarationHandler.validateUrlArgument(terms.get(0), "SPARQL endpoint URL"); String variables = DataSourceDeclarationHandler.validateStringArgument(terms.get(1), diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java index 830d97c59..6640a414e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -34,9 +34,10 @@ */ public class TridentDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDataSourceDeclaration(List terms) throws ParsingException { + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); - String fileName = DataSourceDeclarationHandler.validateStringArgument(terms.get(0), "path to Trident database"); + String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), + "path to Trident database", importBasePath); return new TridentDataSource(fileName); } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 403238df4..262815779 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -42,15 +42,18 @@ public class ImportFileDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(final List arguments, final SubParserFactory subParserFactory) throws ParsingException { + final ParserConfiguration parserConfiguration = new ParserConfiguration( + getParserConfiguration(subParserFactory)); DirectiveHandler.validateNumberOfArguments(arguments, 1); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); - KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); - ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + final File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file", + parserConfiguration.getImportBasePath()); + final KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + parserConfiguration.setImportBasePath(file.getParent()); try { - knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + knowledgeBase.importRulesFile(file, (final InputStream stream, final KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration); }); } catch (RulewerkException | IOException | IllegalArgumentException e) { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index f70831417..115f4f4e3 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -44,11 +44,14 @@ public class ImportFileRelativeDirectiveHandler implements DirectiveHandler arguments, SubParserFactory subParserFactory) throws 
ParsingException { + final ParserConfiguration parserConfiguration = new ParserConfiguration( + getParserConfiguration(subParserFactory)); DirectiveHandler.validateNumberOfArguments(arguments, 1); PrefixDeclarationRegistry prefixDeclarationRegistry = getPrefixDeclarationRegistry(subParserFactory); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file", + parserConfiguration.getImportBasePath()); KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); - ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + parserConfiguration.setImportBasePath(file.getParent()); try { knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java index 9a615bf1b..a607ac22e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 66f89562a..eaedc2198 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -30,6 +30,7 @@ import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class DirectiveHandlerTest { + private static final String BASE_PATH = System.getProperty("user.dir"); private static final String STRING = "src/test/resources/facts.rls"; private static final Term STRINGTERM = Expressions.makeDatatypeConstant(STRING, PrefixDeclarationRegistry.XSD_STRING); @@ -42,7 +43,7 @@ public class DirectiveHandlerTest { public void validateStringArgument_stringArgument_succeeds() throws ParsingException { assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_STRING_ARGUMENT, "string argument")); } - + @Test(expected = ParsingException.class) public void validateStringArgument_stringArgument_throws() throws ParsingException { assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_INT_ARGUMENT, "string argument")); @@ -55,15 +56,15 @@ public void validateTermArgument_termArgument_succeeds() throws ParsingException @Test public void validateFilenameArgument_filename_succeeds() throws ParsingException { - assertEquals(new File(STRING), - DirectiveHandler.validateFilenameArgument(TERM_STRING_ARGUMENT, "filename argument")); + assertEquals(new File(BASE_PATH + File.separator + STRING), + DirectiveHandler.validateFilenameArgument(TERM_STRING_ARGUMENT, "filename argument", BASE_PATH)); } @Test public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { DirectiveHandler.validateFilenameArgument(Argument .term(Expressions.makeDatatypeConstant(STRING + "-nonexistent", 
PrefixDeclarationRegistry.XSD_STRING)), - "filename argument"); + "filename argument", BASE_PATH); } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 8e0c0abb4..6a22ed61b 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,6 +23,7 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; +import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -49,6 +50,7 @@ import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; public class RuleParserDataSourceTest { + private static final String BASE_PATH = System.getProperty("user.dir") + File.separator; private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; private static final String EXAMPLE_CSV_FILE_PATH = "src/main/data/input/example.csv"; private static final String WIKIDATA_SPARQL_ENDPOINT_URI = "https://query.wikidata.org/sparql"; @@ -57,14 +59,14 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; - CsvFileDataSource csvds = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); + 
CsvFileDataSource csvds = new CsvFileDataSource(BASE_PATH + EXAMPLE_CSV_FILE_PATH); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; - RdfFileDataSource rdfds = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); + RdfFileDataSource rdfds = new RdfFileDataSource(BASE_PATH + EXAMPLE_RDF_FILE_PATH); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @@ -142,15 +144,18 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDataSourceDeclaration(ArgumentMatchers.>any()); + doReturn(source).when(handler).handleDataSourceDeclaration(ArgumentMatchers.>any(), + ArgumentMatchers.any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList( Expressions.makeDatatypeConstant("hello", PrefixDeclarationRegistry.XSD_STRING), Expressions.makeDatatypeConstant("world", PrefixDeclarationRegistry.XSD_STRING)); RuleParser.parse(input, parserConfiguration); + final String expectedImportBasePath = System.getProperty("user.dir"); - verify(handler).handleDataSourceDeclaration(ArgumentMatchers.eq(expectedArguments)); + verify(handler).handleDataSourceDeclaration(ArgumentMatchers.eq(expectedArguments), + ArgumentMatchers.eq(expectedImportBasePath)); } @Test @@ -159,9 +164,9 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep Predicate predicate1 = Expressions.makePredicate("p", 1); SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), "var", "?var wdt:P31 wd:Q5 ."); - 
DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource); - RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - assertEquals(dataSourceDeclaration1, kb.getDataSourceDeclarations().get(0)); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, dataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } @Test @@ -171,7 +176,8 @@ public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingExceptio RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().setImportBasePath(""); + RuleParser.parseInto(kb, dataSourceDeclaration.toString(), parserConfiguration); assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } @@ -182,7 +188,8 @@ public void csvDataSourceDeclarationToStringParsingTest() throws ParsingExceptio CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); - RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().setImportBasePath(""); + RuleParser.parseInto(kb, dataSourceDeclaration.toString(), parserConfiguration); assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); } @@ -200,7 +207,7 @@ public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws Pa public void testTridentSource_succeeds() throws ParsingException, IOException { String input = "@source p[2] : trident(\"" + 
EXAMPLE_TRIDENT_PATH + "\") ."; DataSource parsed = RuleParser.parseDataSourceDeclaration(input).getDataSource(); - TridentDataSource expected = new TridentDataSource(EXAMPLE_TRIDENT_PATH); + TridentDataSource expected = new TridentDataSource(BASE_PATH + EXAMPLE_TRIDENT_PATH); assertEquals(expected, parsed); } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 3b3b77b60..797d6c59c 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -51,7 +51,9 @@ public class RuleParserTest implements ParserTestUtils { private final Variable z = Expressions.makeUniversalVariable("Z"); private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); private final Constant d = Expressions.makeAbstractConstant("http://example.org/d"); + private final Constant e = Expressions.makeAbstractConstant("https://example.org/e"); private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); + private final Constant xyz = Expressions.makeDatatypeConstant("xyz", PrefixDeclarationRegistry.XSD_STRING); private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); @@ -60,6 +62,9 @@ public class RuleParserTest implements ParserTestUtils { private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", abc); + private final PositiveLiteral fact4 = Expressions.makePositiveLiteral("https://example.org/s", e); + private final PositiveLiteral fact5 = Expressions.makePositiveLiteral("q", xyz); + private final PositiveLiteral fact6 = Expressions.makePositiveLiteral("http://example.org/p", abc); private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); private final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); @@ -504,6 +509,42 @@ public void parse_relativeImportStatement_succeeds() throws ParsingException { assertEquals(expected, result); } + @Test + public void parse_importStatement_relativeImport_succeeds() throws 
ParsingException { + String input = "@import \"src/test/resources/subdir/sibling.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact4, fact5); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_importStatement_relativeParentImport_succeeds() throws ParsingException { + String input = "@import \"src/test/resources/subdir/parent.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_relativeImportStatement_relativeImport_succeeds() throws ParsingException { + String input = "@base . @import-relative \"src/test/resources/subdir/sibling.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact4, fact5); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_relativeImportStatement_relativeParentImport_succeeds() throws ParsingException { + String input = "@base . @import-relative \"src/test/resources/subdir/parent.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + @Test public void parse_import_renamesNamedNulls() throws ParsingException { String input = "p(_:blank) . @import \"src/test/resources/blank.rls\" ."; diff --git a/rulewerk-parser/src/test/resources/subdir/facts.rls b/rulewerk-parser/src/test/resources/subdir/facts.rls new file mode 100644 index 000000000..b7b5da5ef --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/facts.rls @@ -0,0 +1,4 @@ +@prefix ex: . + +ex:s(ex:e) . +q("xyz") . 
diff --git a/rulewerk-parser/src/test/resources/subdir/parent.rls b/rulewerk-parser/src/test/resources/subdir/parent.rls new file mode 100644 index 000000000..4abb16ff5 --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/parent.rls @@ -0,0 +1 @@ +@import "../facts.rls" . diff --git a/rulewerk-parser/src/test/resources/subdir/sibling.rls b/rulewerk-parser/src/test/resources/subdir/sibling.rls new file mode 100644 index 000000000..0d1d18bfb --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/sibling.rls @@ -0,0 +1 @@ +@import "facts.rls" . From ab4e96b6dc53052d956019e12979295ec6563c24 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 18:25:00 +0200 Subject: [PATCH 1070/1255] Add relative paths handling to release notes --- RELEASE-NOTES.md | 299 ++++++++++++++++++++++++----------------------- 1 file changed, 150 insertions(+), 149 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index d7fbee0a6..2b6ca3371 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,149 +1,150 @@ -Rulewerk Release Notes -====================== - -Rulewerk v0.7.0 ---------------- - -New features: -* New interactive Rulewerk shell for rule reasoning from the command line client -* Significant speedup in iterating over query results -* Support for using data from a Trident database, the recommended data source for large - RDF graphs in VLog -* More features to control how Rulewerk imports RDF data using rulewerk-rdf module -* New class `LiteralQueryResultPrinter` for pretty-printing query results - -Other improvements: -* Improved serialization of knowledge bases (using namespaces) -* Simple (non-IRI, namespace-less) predicate names can now include - and _ -* Nulls in input data (aka "blank nodes") are now properly skolemized for VLog -* InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where - two or more edges are the same. 
- -Breaking changes: -* The `RdfModelConverter` class from the rdf package is no longer static (and has more options) -* The `Serializer` class in the core package has been replaced by a new implementation - with a completely different interface. -* The methods `getSerialization` that were present in most syntax objects have been removed. Use `toString()` instead for simple serializations, or invoke a custom Serializer. -* The `DataSource` interface requires a new method to be implemented. - -Rulewerk v0.6.0 ---------------- - -Breaking changes: -* VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names - of the project have changed. -* In the examples package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no - longer exist. It can be replaced by - `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` -* The `FileDataSource` constructor and those of child classes (`CsvFileDataSource`, `RdfFileDataSource`) - now take the String path to a file instead of `File` object. -* The VLog backend has been moved to a new `rulewerk-vlog` module, - changing several import paths. `Reasoner.getInstance()` is - gone. Furthermore, `InMemoryDataSource` has become an abstract class, - use `VLogInMemoryDataSource` where applicable. - -New features: -* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` -* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` -* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` -* `Reasoner.getCorrectness()` returns the correctness result of the last reasoning task. -* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` -* Rules files may import other rules files using `@import` and - `@import-relative`, where the latter resolves relative IRIs using - the current base IRI, unless the imported file explicitly specifies - a different one. 
-* Named nulls of the form `_:name` are now allowed during parsing (but - may not occur in rule bodies). They are renamed to assure that they - are distinct on a per-file level. -* The parser allows custom directives to be implemented, and a certain - set of delimiters allows for custom literal expressions. - -Other improvements: -* Prefix declarations are now kept as part of the Knowledge Base and - are used to abbreviate names when exporting inferences. - -Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now - -VLog4j v0.5.0 -------------- - -Breaking changes: -* The data model for rules has been refined and changed: - * Instead of Constant, specific types of constants are used to capture abtract and data values - * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification - * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes - * Methods to access terms now use Java Streams and are unified across syntactic objects -* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` - -New features: -* New module vlog4j-client provides a stand-alone command line client jar for VLog4j -* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki -* The parser behaviour for data source declarations and certain datatype literals can be customised. 
- -Other improvements: -* Data model is better aligned with syntax supported by parser -* Java object Statements (rules, facts, datasource declarations) String representation is parseable -* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) -* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) -* Cobertura test coverage tool has been replaced by JaCoCo - -Bugfixes: -* Acyclicity checks work again without calling reason() first (issue #128) -* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) -* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) -* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. - -VLog4j v0.4.0 -------------- - -Breaking changes: -* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) -* The EdbIdbSeparation is obsolete and does no longer exist -* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier -* A new interface Fact has replaced the overly general PositiveLiteral in many places - -New features: -* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java -* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) -* New InMemoryDataSource for efficient in-memory fact loading -* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner -* Modifications to the knowledge base are taken into account by the reasoner -* New and updated example programs to illustrate use of syntax - -Other improvements: -* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) -* Faster and more memory-efficient loading of facts -* Better error reporting; improved use of 
exceptions -* Better logging, especially on the INFO level -* Better code structure and testing - -Bugfixes: -* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now - - -VLog4j v0.3.0 -------------- - -New features: -* Support for Graal data structures (conversion from Graal model to VLog model objects) -* Stratified negation: rule bodies are conjunctions of positive or negated literals -* SPARQL-based data sources: load remote data from SPARQL endpoints -* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined - -VLog4j v0.2.0 -------------- - -New features: -* supporting File data sources of N-Triples format (.nt file extension) -* supporting g-zipped data source files (.csv.gz, .nt.gz) - -VLog4j v0.1.0 -------------- - -Initial release. - -New features: -* Essential data models for rules and facts, and essential reasoner functionality -* support for reading from RDF files -* support for converting rules from OWL ontology, loaded with the OWL API +Rulewerk Release Notes +====================== + +Rulewerk v0.7.0 +--------------- + +New features: +* New interactive Rulewerk shell for rule reasoning from the command line client +* Significant speedup in iterating over query results +* Support for using data from a Trident database, the recommended data source for large + RDF graphs in VLog +* More features to control how Rulewerk imports RDF data using rulewerk-rdf module +* New class `LiteralQueryResultPrinter` for pretty-printing query results + +Other improvements: +* Improved serialization of knowledge bases (using namespaces) +* Simple (non-IRI, namespace-less) predicate names can now include - and _ +* Nulls in input data (aka "blank nodes") are now properly skolemized for VLog +* InMemoryGraphAnalysisExample now counts proper triangles using negation to avoid "triangles" where + two or 
more edges are the same. + +Breaking changes: +* The `RdfModelConverter` class from the rdf package is no longer static (and has more options) +* The `Serializer` class in the core package has been replaced by a new implementation + with a completely different interface. +* The methods `getSerialization` that were present in most syntax objects have been removed. Use `toString()` instead for simple serializations, or invoke a custom Serializer. +* The `DataSource` interface requires a new method to be implemented. +* `@import`, `@import-relative`, and `@source` now treat relative paths as relative to the file they occur in, as opposed to the global working directory. + +Rulewerk v0.6.0 +--------------- + +Breaking changes: +* VLog4j is now called Rulewerk. Consequently, the groupId, artifact Ids, and package names + of the project have changed. +* In the examples package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no + longer exist. It can be replaced by + `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` +* The `FileDataSource` constructor and those of child classes (`CsvFileDataSource`, `RdfFileDataSource`) + now take the String path to a file instead of `File` object. +* The VLog backend has been moved to a new `rulewerk-vlog` module, + changing several import paths. `Reasoner.getInstance()` is + gone. Furthermore, `InMemoryDataSource` has become an abstract class, + use `VLogInMemoryDataSource` where applicable. + +New features: +* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` +* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()` +* `Reasoner.getCorrectness()` returns the correctness result of the last reasoning task. 
+* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` +* Rules files may import other rules files using `@import` and + `@import-relative`, where the latter resolves relative IRIs using + the current base IRI, unless the imported file explicitly specifies + a different one. +* Named nulls of the form `_:name` are now allowed during parsing (but + may not occur in rule bodies). They are renamed to assure that they + are distinct on a per-file level. +* The parser allows custom directives to be implemented, and a certain + set of delimiters allows for custom literal expressions. + +Other improvements: +* Prefix declarations are now kept as part of the Knowledge Base and + are used to abbreviate names when exporting inferences. + +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + +VLog4j v0.5.0 +------------- + +Breaking changes: +* The data model for rules has been refined and changed: + * Instead of Constant, specific types of constants are used to capture abtract and data values + * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification + * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes + * Methods to access terms now use Java Streams and are unified across syntactic objects +* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()` + +New features: +* New module vlog4j-client provides a stand-alone command line client jar for VLog4j +* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki +* The parser behaviour for data source declarations and certain datatype literals can be customised. 
+ +Other improvements: +* Data model is better aligned with syntax supported by parser +* Java object Statements (rules, facts, datasource declarations) String representation is parseable +* OWL API dependency has been upgraded from 4.5.1 to latest (5.1.11) +* SL4J dependency has been upgraded from 1.7.10 to latest (1.7.28) +* Cobertura test coverage tool has been replaced by JaCoCo + +Bugfixes: +* Acyclicity checks work again without calling reason() first (issue #128) +* in vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104) +* in vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20) +* When parsing syntactic fragment such as Facts or Literals, the parser now enforces that all input is consumed. + +VLog4j v0.4.0 +------------- + +Breaking changes: +* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase) +* The EdbIdbSeparation is obsolete and does no longer exist +* IRIs loaded from RDF inputs no longer include surrounding < > in their string identifier +* A new interface Fact has replaced the overly general PositiveLiteral in many places + +New features: +* New own syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java +* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction) +* New InMemoryDataSource for efficient in-memory fact loading +* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner +* Modifications to the knowledge base are taken into account by the reasoner +* New and updated example programs to illustrate use of syntax + +Other improvements: +* Query results now indicate their guaranteed correctness (example: answers can be incomplete when setting a timeout) +* Faster and more memory-efficient loading of facts +* Better error reporting; improved use of 
exceptions +* Better logging, especially on the INFO level +* Better code structure and testing + +Bugfixes: +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now + + +VLog4j v0.3.0 +------------- + +New features: +* Support for Graal data structures (conversion from Graal model to VLog model objects) +* Stratified negation: rule bodies are conjunctions of positive or negated literals +* SPARQL-based data sources: load remote data from SPARQL endpoints +* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, RFC, as well as a generic method that checks whether given set or rules and fact predicates are acyclic, cyclic, or undetermined + +VLog4j v0.2.0 +------------- + +New features: +* supporting File data sources of N-Triples format (.nt file extension) +* supporting g-zipped data source files (.csv.gz, .nt.gz) + +VLog4j v0.1.0 +------------- + +Initial release. + +New features: +* Essential data models for rules and facts, and essential reasoner functionality +* support for reading from RDF files +* support for converting rules from OWL ontology, loaded with the OWL API From 3a423b7d5932079b80109244d2fedd1de054258f Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 18:49:16 +0200 Subject: [PATCH 1071/1255] add unit tests for @load command completers --- .../shell/DefaultShellConfigurationTest.java | 72 ++++++++++++++++++- 1 file changed, 71 insertions(+), 1 deletion(-) diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index 87f369cd9..83c8a5a2a 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -42,6 +42,7 @@ import org.junit.Test; import 
org.junit.rules.TemporaryFolder; import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.LoadCommandInterpreter; public class DefaultShellConfigurationTest { @@ -86,7 +87,7 @@ public void buildCompleterHelp() { } @Test - public void buildCompleterLoad() { + public void buildCompleterLoad_emptyLine() { final ArrayList readWords = new ArrayList(); readWords.add("@load"); @@ -95,6 +96,75 @@ public void buildCompleterLoad() { assertFalse(candidates.isEmpty()); final String tempFolderName = this.folder.getRoot().getName(); assertTrue(candidates.contains(tempFolderName)); + + assertTrue(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertTrue(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertTrue(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_task_OWL() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + readWords.add(LoadCommandInterpreter.TASK_OWL); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_task_RDF() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + readWords.add(LoadCommandInterpreter.TASK_RDF); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + 
assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_task_RLS() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + readWords.add(LoadCommandInterpreter.TASK_RLS); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.isEmpty()); + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); + } + + @Test + public void buildCompleterLoad_file() { + final ArrayList readWords = new ArrayList(); + readWords.add("@load"); + final String tempFolderName = this.folder.getRoot().getName(); + readWords.add(tempFolderName); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF)); + assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); } private Set getCompleterCandidates(final ArrayList readWords, final String wordToComplete) { From a361d8b009444ac9940eebcedbf49eac91dcc22c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 18:55:08 +0200 Subject: [PATCH 1072/1255] Parser: Handle importing sibling files by bare name --- .../rulewerk/parser/ParserConfiguration.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 15d05f3f3..d094124f1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -343,10 +343,16 @@ public String getImportBasePath() { /** * Set a new base path for file imports. * - * @param importBasePath path that relative imports will be resolved against. + * @param importBasePath path that relative imports will be + * resolved against. If null, default to current working + * directory. */ public ParserConfiguration setImportBasePath(String importBasePath) { - this.importBasePath = importBasePath; + if (importBasePath != null) { + this.importBasePath = importBasePath; + } else { + this.importBasePath = System.getProperty("user.dir"); + } return this; } From 67fb40f2d21f9399f13875e84fe773bb33be441b Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 19:07:48 +0200 Subject: [PATCH 1073/1255] added unit test for @export and @clear completers --- .../shell/DefaultShellConfigurationTest.java | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java index 83c8a5a2a..a1f0bf7f5 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java @@ -42,6 +42,8 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.ClearCommandInterpreter; +import org.semanticweb.rulewerk.commands.ExportCommandInterpreter; import org.semanticweb.rulewerk.commands.LoadCommandInterpreter; public class DefaultShellConfigurationTest { @@ -167,6 +169,98 @@ public void buildCompleterLoad_file() { assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS)); } + @Test + public void 
buildCompleterExport_emptyLine() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final HashSet expectedCandidates = new HashSet<>(); + expectedCandidates.add(ExportCommandInterpreter.TASK_INFERENCES); + expectedCandidates.add(ExportCommandInterpreter.TASK_KB); + + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterExport_task_INFERENCES() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + readWords.add(ExportCommandInterpreter.TASK_INFERENCES); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_INFERENCES)); + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_KB)); + } + + @Test + public void buildCompleterExport_unknown() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + readWords.add("unknown"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + assertTrue(candidates.isEmpty()); + } + + @Test + public void buildCompleterExport_task_KB() { + final ArrayList readWords = new ArrayList(); + readWords.add("@export"); + readWords.add(ExportCommandInterpreter.TASK_KB); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final String tempFolderName = this.folder.getRoot().getName(); + assertTrue(candidates.contains(tempFolderName)); + + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_INFERENCES)); + assertFalse(candidates.contains(ExportCommandInterpreter.TASK_KB)); + } + + @Test + public void buildCompleterClear_emptyLine() { + final ArrayList readWords = new ArrayList(); + readWords.add("@clear"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + + final HashSet expectedCandidates = 
new HashSet<>(); + expectedCandidates.add(ClearCommandInterpreter.TASK_ALL); + expectedCandidates.add(ClearCommandInterpreter.TASK_FACTS); + expectedCandidates.add(ClearCommandInterpreter.TASK_INFERENCES); + expectedCandidates.add(ClearCommandInterpreter.TASK_PREFIXES); + expectedCandidates.add(ClearCommandInterpreter.TASK_RULES); + expectedCandidates.add(ClearCommandInterpreter.TASK_SOURCES); + + assertEquals(expectedCandidates, candidates); + } + + @Test + public void buildCompleterClear_unknown() { + final ArrayList readWords = new ArrayList(); + readWords.add("@clear"); + readWords.add("unknown"); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + assertTrue(candidates.isEmpty()); + } + + @Test + public void buildCompleterClear_task_ALL() { + final ArrayList readWords = new ArrayList(); + readWords.add("@clear"); + readWords.add(ClearCommandInterpreter.TASK_ALL); + + final Set candidates = this.getCompleterCandidates(readWords, ""); + assertTrue(candidates.isEmpty()); + } + private Set getCompleterCandidates(final ArrayList readWords, final String wordToComplete) { final List candidates = new ArrayList<>(); From c087f05aba4d0a2e3fff79d2c63c66498f05b7f4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 19:56:34 +0200 Subject: [PATCH 1074/1255] Core: Normalise paths for Trident data sources --- .../implementation/TridentDataSource.java | 15 ++++++++++++--- .../vlog/VLogDataSourceConfigurationVisitor.java | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 213c7df2f..182b04718 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -1,5 +1,7 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; +import java.io.File; + /*- * #%L * Rulewerk Core Components @@ -46,16 +48,23 @@ public class TridentDataSource implements ReasonerDataSource { public static final String declarationPredicateName = "trident"; final String filePath; + final String fileName; public TridentDataSource(final String filePath) { Validate.notBlank(filePath, "Path to Trident database cannot be blank!"); - this.filePath = filePath; + this.filePath = filePath; // unmodified file path, necessary for correct serialisation + this.fileName = new File(filePath).getName(); } public String getPath() { return this.filePath; } + public String getName() { + return this.fileName; + } + + @Override public Fact getDeclarationFact() { Predicate predicate = Expressions.makePredicate(declarationPredicateName, 1); @@ -65,7 +74,7 @@ public Fact getDeclarationFact() { @Override public String toString() { - return "[TridentDataSource [tridentFile=" + this.filePath + "]"; + return "[TridentDataSource [tridentFile=" + this.fileName + "]"; } @Override @@ -90,7 +99,7 @@ public boolean equals(final Object obj) { return false; } final TridentDataSource other = (TridentDataSource) obj; - return this.filePath.equals(other.getPath()); + return this.fileName.equals(other.getName()); } } diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java index c2bc52c08..2e4f250df 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -82,7 +82,7 @@ public void visit(SparqlQueryResultDataSource 
dataSource) { public void visit(TridentDataSource dataSource) { this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" // + TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE + "\n" // - + "EDB%1$d_param0=" + dataSource.getPath() + "\n"; + + "EDB%1$d_param0=" + dataSource.getName() + "\n"; } @Override From b53d62c37c3aaef74ecb02928232151e3eb28651 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 3 Sep 2020 21:34:25 +0200 Subject: [PATCH 1075/1255] update version to snapsot 0.8.0-SNAPSHOT --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 11 files changed, 11 insertions(+), 11 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 40e671d48..cd9c75339 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT coverage diff --git a/pom.xml b/pom.xml index 300fb51a4..9367c117b 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index f7b3e3a7c..695e9a6a4 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-client diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 599a526a5..773e18561 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index dc562d9ab..3ab864a66 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ 
org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 739f580cb..341378aec 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index 34ef21c2d..13631b3cb 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-graal diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 2b50c042d..be81cdf49 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index c88769034..e495e5247 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index ffed1fc13..e64742507 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 53fc07b13..a098e8a17 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.7.0 + 0.8.0-SNAPSHOT rulewerk-vlog From d0243381f229b61ee728c03bfb276a527e8173eb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 3 Sep 2020 22:15:14 +0200 Subject: [PATCH 1076/1255] Fix javadoc executable path on JDK >= 9 --- pom.xml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pom.xml b/pom.xml index 9367c117b..8a3d6d7b0 100644 --- a/pom.xml +++ b/pom.xml @@ -427,6 +427,9 @@ [9,) + + ${java.home}/bin/javadoc + From 
b6fc66d0fdaec2a358757913076047c37d72ba0e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:21:38 +0200 Subject: [PATCH 1077/1255] +nonabbr. RDF literal serialisations --- .../core/model/api/DatatypeConstant.java | 14 ++++++++++++++ .../implementation/DatatypeConstantImpl.java | 9 +++++++++ .../core/model/implementation/Serializer.java | 17 ++++++++++++++--- .../core/model/implementation/TermImplTest.java | 2 ++ 4 files changed, 39 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index 0de18e509..c64955d35 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -58,4 +58,18 @@ default TermType getType() { */ String getLexicalValue(); + /** + * Returns a string representation of this value that conforms to RDF + * serialisation formats such as Turtle. Turtle supports some abbreviations for + * common types, e.g., by leaving the type away for xsd:string literals, which + * can be enabled or disabled through the functions parameter. 
+ * + * @param useAbbreviations if true, the result may use Turtle-style + * abbreviations to shorten the output where supported; + * otherwise the literal will always be serialised in + * full + * @return RDF-style string serialisation of the value + */ + String getRdfLiteralString(boolean useAbbreviations); + } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index 8696190c7..f4810bee4 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -58,6 +58,15 @@ public String getLexicalValue() { return this.lexicalValue; } + @Override + public String getRdfLiteralString(boolean useAbbreviations) { + if (useAbbreviations) { + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); + } else { + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstantNoAbbreviations(this)); + } + } + @Override public String toString() { return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 810b85a0e..741aba0a5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -427,12 +427,23 @@ public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOEx } else if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype())) { 
writer.write(datatypeConstant.getLexicalValue()); } else { - writer.write(getQuotedString(datatypeConstant.getLexicalValue())); - writer.write("^^"); - writer.write(getIri(datatypeConstant.getDatatype())); + writeDatatypeConstantNoAbbreviations(datatypeConstant); } } + /** + * Writes a serialization of the given {@link DatatypeConstant} without using + * any Turtle-style abbreviations for common datatypes like string and int. + * + * @param datatypeConstant a {@link DatatypeConstant} + * @throws IOException + */ + public void writeDatatypeConstantNoAbbreviations(DatatypeConstant datatypeConstant) throws IOException { + writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + writer.write("^^"); + writer.write(getIri(datatypeConstant.getDatatype())); + } + /** * Writes a serialization of the given {@link UniversalVariable}. * diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java index c8230303d..a2fb5e0a0 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java @@ -144,6 +144,8 @@ public void abstractConstantToStringTest() { public void datatypeConstantToStringTest() { DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); assertEquals("\"c\"", c.toString()); + assertEquals("\"c\"", c.getRdfLiteralString(true)); + assertEquals("\"c\"^^<" + PrefixDeclarationRegistry.XSD_STRING + ">", c.getRdfLiteralString(false)); } @Test From d63cedaeef6c6fad4b410c831309a1bc9418151a Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:22:21 +0200 Subject: [PATCH 1078/1255] Use expanded RDF serialisation for terms --- .../semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index 3e294f848..ea2f20526 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -58,7 +58,7 @@ public karmaresearch.vlog.Term visit(final AbstractConstant term) { */ @Override public karmaresearch.vlog.Term visit(final DatatypeConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getRdfLiteralString(false)); } /** From 89cd184c6cba3456370a2a61a176bb61bae39a07 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:26:51 +0200 Subject: [PATCH 1079/1255] fix problem with xsd:string in RDF --- RELEASE-NOTES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 2b6ca3371..6ad151e9c 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,6 +1,12 @@ Rulewerk Release Notes ====================== +Rulewerk v0.8.0 +--------------- + +Bugfixes: +* Encoding of RDF strings corrected to make sure VLog succeeds joining on strings + Rulewerk v0.7.0 --------------- From 9685dec07a9d783e3040ab838f8fd2cf5d93bcb0 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 10:54:55 +0200 Subject: [PATCH 1080/1255] New test for correct RDF string handling --- .../reasoner/vlog/VLogReasonerRdfInput.java | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java index c64e829ea..b5d068ab6 100644 --- 
a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java @@ -34,6 +34,7 @@ import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -48,6 +49,9 @@ public class VLogReasonerRdfInput { private static final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), Expressions.makeUniversalVariable("o")); + private static final PositiveLiteral queryAtomString = Expressions.makePositiveLiteral(ternaryPredicate, + Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), + Expressions.makeDatatypeConstant("test string", PrefixDeclarationRegistry.XSD_STRING)); @SuppressWarnings("unchecked") private static final Set> expectedTernaryQueryResult = Sets.newSet( @@ -57,6 +61,11 @@ public class VLogReasonerRdfInput { Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), Expressions.makeAbstractConstant("http://example.org/q"), Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); + @SuppressWarnings("unchecked") + private static final Set> expectedTernaryQueryResultString = Sets + .newSet(Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/q"), + Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); @Ignore // TODO test fails for now, because of a VLog bug. 
Remove the @Ignore annotation @@ -82,6 +91,12 @@ public void testLoadTernaryFactsFromRdfFile() throws IOException { FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt")); } + @Test + public void queryStringFromRdf_succeeds() throws IOException { + testQueryStringFromSingleRdfDataSource(new RdfFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt")); + } + @Test public void testLoadTernaryFactsFromRdfFileGz() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( @@ -102,6 +117,20 @@ public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fil } } + public void testQueryStringFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomString, true); + final Set> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + + assertEquals(expectedTernaryQueryResultString, queryResult); + } + } + @Test(expected = IOException.class) public void testLoadNonexistingRdfFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.nt"); From e31140cf90f92cdcaae7a3d41dde2bd3e789e3c0 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:12:36 +0200 Subject: [PATCH 1081/1255] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 221f3a1b0..9d92b1b9d 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ Development * Pull requests are welcome. * The master branch may require a development version of VLog. 
-Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds "build-vlog" and "local_builds" first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From cbd2bfd21b94e1d25e52e27b682e64464a3a6c48 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:29:45 +0200 Subject: [PATCH 1082/1255] Update README.md change order of items in "Development" section --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 9d92b1b9d..84e98ddd4 100644 --- a/README.md +++ b/README.md @@ -52,8 +52,9 @@ Development ----------- * Pull requests are welcome. +* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. + * The master branch may require a development version of VLog. Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds "build-vlog" and "local_builds" first). * Users of Eclipse should install the javacc plugin to generate the parser sources. 
After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. -* We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 28deb877ef86f0952a185f79caafe7a631ee2a91 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:30:47 +0200 Subject: [PATCH 1083/1255] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 84e98ddd4..1d41c8e3e 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,6 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of VLog. -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds "build-vlog" and "local_builds" first). +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. 
This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From 2f93127323b6d7829d1ecab8918ffe47cbee9618 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:43:01 +0200 Subject: [PATCH 1084/1255] Update README.md vlog-java instead of vlog-base --- README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 1d41c8e3e..c9f64266a 100644 --- a/README.md +++ b/README.md @@ -33,9 +33,9 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use vlog-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use `vlog-java`, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: -* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog-base. 
+* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. * Run ```mvn install``` to test if the setup works @@ -54,7 +54,8 @@ Development * Pull requests are welcome. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. -* The master branch may require a development version of VLog. -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). +* The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code in [VLog](https://github.com/karmaresearch/vlog) master branch. + * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. 
From 0c31eac0f3967416bcd60660b89224e9c3f7bbe9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:46:28 +0200 Subject: [PATCH 1085/1255] Update README with references to VLog --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c9f64266a..3fe3b6fbf 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use `vlog-java`, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use `vlog-java`, which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. 
* Run ```mvn install``` to test if the setup works @@ -55,7 +55,7 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code in [VLog](https://github.com/karmaresearch/vlog) master branch. +Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. * Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. 
From 400510ca21eef27c27a69b1fa757fcc713971104 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 12:51:54 +0200 Subject: [PATCH 1086/1255] Update README explain how to install javacc --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3fe3b6fbf..ea5f54327 100644 --- a/README.md +++ b/README.md @@ -57,5 +57,5 @@ Development * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. -* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. +* Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. 
From fe0f86f6951b465b62fc113ce4ce665167e73561 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 25 Sep 2020 14:04:24 +0200 Subject: [PATCH 1087/1255] Core: Fix path handling for Trident data sources --- .../core/reasoner/implementation/TridentDataSource.java | 7 +++---- .../datasources/TridentDataSourceDeclarationHandler.java | 7 ++++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java index 182b04718..8f698e45b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -50,10 +50,10 @@ public class TridentDataSource implements ReasonerDataSource { final String filePath; final String fileName; - public TridentDataSource(final String filePath) { + public TridentDataSource(final String filePath) throws IOException { Validate.notBlank(filePath, "Path to Trident database cannot be blank!"); - this.filePath = filePath; // unmodified file path, necessary for correct serialisation - this.fileName = new File(filePath).getName(); + this.filePath = filePath; // unmodified file path, necessary for correct serialisation + this.fileName = new File(filePath).getCanonicalPath(); } public String getPath() { @@ -64,7 +64,6 @@ public String getName() { return this.fileName; } - @Override public Fact getDeclarationFact() { Predicate predicate = Expressions.makePredicate(declarationPredicateName, 1); diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java index 6640a414e..7ec1627d4 100644 --- 
a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java @@ -20,6 +20,7 @@ * #L% */ +import java.io.IOException; import java.util.List; import org.semanticweb.rulewerk.core.model.api.DataSource; @@ -39,6 +40,10 @@ public DataSource handleDataSourceDeclaration(List terms, String importBas String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "path to Trident database", importBasePath); - return new TridentDataSource(fileName); + try { + return new TridentDataSource(fileName); + } catch (IOException e) { + throw new ParsingException("Could not use trident database \"" + fileName + "\": " + e.getMessage(), e); + } } } From 6989f010472d3030afa7624ff444a988b8841c62 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 25 Sep 2020 14:08:13 +0200 Subject: [PATCH 1088/1255] Update Release-Notes --- RELEASE-NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 6ad151e9c..cb12aa85e 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -6,6 +6,7 @@ Rulewerk v0.8.0 Bugfixes: * Encoding of RDF strings corrected to make sure VLog succeeds joining on strings +* Fixed handling of trident databases that are not a direct child of the current working directory Rulewerk v0.7.0 --------------- From 0c982fb05f93cbb8d1d77289ce59d5e195caf8cc Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 15:13:04 +0200 Subject: [PATCH 1089/1255] Update README.md --- README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ea5f54327..ce411c63a 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,8 @@ You need to use Java 1.8 or above. 
Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released packages use `vlog-java`, which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: - +The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows +* (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. * Run ```mvn install``` to test if the setup works @@ -55,7 +55,9 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). 
The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). -Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. +Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. + + * Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. From a95098c8e18e12d9a7dd7911e77e2391c9089419 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 15:15:02 +0200 Subject: [PATCH 1090/1255] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index ce411c63a..902b79b8d 100644 --- a/README.md +++ b/README.md @@ -33,10 +33,10 @@ You need to use Java 1.8 or above. 
Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows +The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. -* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. 
-* Run ```mvn install``` to test if the setup works +* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. +* Run ```mvn install``` to test if the setup works From 969632be31ab44787890f976739063f5bfd02cbd Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 15:24:11 +0200 Subject: [PATCH 1091/1255] Update README.md --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 902b79b8d..c01e8d5ff 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,9 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. -The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). 
In case of problems, or if you are using the current development version, own binaries can be compiled as follows +The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. +* Delete (if existing) previous local builds (`local_builds` directory) * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. * Run ```mvn install``` to test if the setup works @@ -55,7 +56,7 @@ Development * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. * The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog). -Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to create and install it on your machine (you may need to delete previous local builds `build-vlog` and `local_builds` first). This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. 
+Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to create and install it on your machine. This will compile and install `vlog-java`dependency with the current code of [VLog](https://github.com/karmaresearch/vlog) master branch. From 7fdfcb92ede15b5919eaafcf6ebf852948ead166 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 25 Sep 2020 15:24:55 +0200 Subject: [PATCH 1092/1255] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c01e8d5ff..4076efe3c 100644 --- a/README.md +++ b/README.md @@ -35,9 +35,9 @@ You need to use Java 1.8 or above. Available source modules include: The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. -* Delete (if existing) previous local builds (`local_builds` directory) +* Delete (if existing) previous local builds (`local_builds` directory). * Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`. -* Run ```mvn install``` to test if the setup works +* Run ```mvn install``` to test if the setup works. 
From f71880eea9213dc4972a3d256741fa56aa62fb1b Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Fri, 25 Sep 2020 16:29:27 +0200 Subject: [PATCH 1093/1255] fix inconsistent conversion of string constants --- .../rulewerk/reasoner/vlog/TermToVLogConverter.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index ea2f20526..ac4cbfae8 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -58,7 +58,7 @@ public karmaresearch.vlog.Term visit(final AbstractConstant term) { */ @Override public karmaresearch.vlog.Term visit(final DatatypeConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getRdfLiteralString(false)); + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); } /** @@ -78,11 +78,10 @@ public karmaresearch.vlog.Term visit(final LanguageStringConstant term) { * @return VLog constant string */ public static String getVLogNameForConstant(final Constant constant) { - final String constantName = constant.getName(); if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - return getVLogNameForIRI(constantName); + return getVLogNameForIRI(constant.getName()); } else { // datatype literal - return constantName; + return ((DatatypeConstant)constant).getRdfLiteralString(false); } } From 6fb28d0530d887dbbdac47751450d53aef8d2e51 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Thu, 1 Oct 2020 11:13:34 +0200 Subject: [PATCH 1094/1255] Fixed exception when lang-strings are sent to VLog --- RELEASE-NOTES.md | 1 + .../rulewerk/reasoner/vlog/TermToVLogConverter.java | 7 ++++++- 
.../rulewerk/reasoner/vlog/ModelToVLogConverterTest.java | 1 + 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index cb12aa85e..faf83bafe 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -7,6 +7,7 @@ Rulewerk v0.8.0 Bugfixes: * Encoding of RDF strings corrected to make sure VLog succeeds joining on strings * Fixed handling of trident databases that are not a direct child of the current working directory +* Fixed encoding of language-tagged strings that are used in Rulewerk facts, which had caused an exception Rulewerk v0.7.0 --------------- diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java index ac4cbfae8..ee43f3068 100644 --- a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -1,5 +1,6 @@ package org.semanticweb.rulewerk.reasoner.vlog; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; @@ -80,8 +81,12 @@ public karmaresearch.vlog.Term visit(final LanguageStringConstant term) { public static String getVLogNameForConstant(final Constant constant) { if (constant.getType() == TermType.ABSTRACT_CONSTANT) { return getVLogNameForIRI(constant.getName()); - } else { // datatype literal + } else if (constant.getType() == TermType.DATATYPE_CONSTANT) { return ((DatatypeConstant)constant).getRdfLiteralString(false); + } else if (constant.getType() == TermType.LANGSTRING_CONSTANT) { + return constant.getName(); + } else { + throw new RulewerkRuntimeException("Unexpected term type: " + constant.getType()); } } diff --git 
a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java index 70c188ffc..c3a2f3c99 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java @@ -111,6 +111,7 @@ public void testToVLogTermLanguageStringConstant() { assertEquals(expectedVLogTerm, vLogTerm); assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); } @Test From 43b4be5387cfae94d490ae284dc3380109c99e75 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Oct 2020 08:33:45 +0000 Subject: [PATCH 1095/1255] Bump junit from 4.12 to 4.13.1 Bumps [junit](https://github.com/junit-team/junit4) from 4.12 to 4.13.1. - [Release notes](https://github.com/junit-team/junit4/releases) - [Changelog](https://github.com/junit-team/junit4/blob/main/doc/ReleaseNotes4.12.md) - [Commits](https://github.com/junit-team/junit4/compare/r4.12...r4.13.1) Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8a3d6d7b0..10123a1c3 100644 --- a/pom.xml +++ b/pom.xml @@ -76,7 +76,7 @@ UTF-8 - 4.12 + 4.13.1 2.28.2 1.7.28 3.9 From d97bd54e5abe293af688ffc81ce786dd3e63e873 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 8 Jan 2021 12:26:29 +0100 Subject: [PATCH 1096/1255] Core: Fix uninitialised base IRI falsely treated as set in unresolve When unresolving relative IRIs, an uninitialised base IRI would falsely be treated as set, resulting in an exception. Fixes #197. 
--- .../AbstractPrefixDeclarationRegistry.java | 6 +++--- .../MergingPrefixDeclarationRegistryTest.java | 14 ++++++++++---- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 851dbe8fb..0d8de1736 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -115,7 +115,7 @@ public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) { if (addIriBrackets) { if (!iri.contains(":") && iri.matches(REGEXP_LOCNAME)) { shortestIri = iri; - if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { + if (baseIri != null && !PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) { throw new RulewerkRuntimeException("Relative IRIs cannot be serialized when a base is declared."); } } else { diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java index 6ea303c2c..946d2bf17 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -104,7 +104,7 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); assertEquals(2, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } - + @Test public void clearPrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefixIri("eg:", BASE); @@ -112,7 +112,7 @@ public void clearPrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.clear(); assertEquals(0, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); } - + @Test public void setPrefixIri_setSamePrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefixIri("eg:", BASE); @@ -229,4 +229,10 @@ public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() assertNotEquals(RELATIVE, resolvedIri); assertEquals("rw_gen0:" + RELATIVE, resolvedIri); } + + @Test + public void unresolveAbsoluteIri_relativeIriAfterClear_succeeds() throws PrefixDeclarationException { + prefixDeclarations.clear(); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(RELATIVE, true)); + } } From 1965dd7740296256c9c7e6d08bb264b5202964e0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 13 Jan 2021 18:56:01 +0100 Subject: [PATCH 1097/1255] Core: Fix a potential race condition with file data sources `FileDataSource` objects have a list of allowed extensions that is validated on creation. Up to now, this list was processed in an unspecified order, which might result in the wrong extension being picked should one be a suffix of another (this is not the case for any of our data sources). Instead of processing the extensions in parallel, we now use a sequential stream, which avoids this problem, and, as a side effect, fixes #198. 
--- .../core/reasoner/implementation/FileDataSource.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 68cbf2f68..1fbb8ee2a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -52,7 +52,9 @@ public abstract class FileDataSource implements ReasonerDataSource { * * @param filePath path to a file that will serve as storage for fact * terms. - * @param possibleExtensions a list of extensions that the files could have + * @param possibleExtensions a list of extensions that the files could have. + * Extensions are tried in the given order, no extension + * in the list can be a suffix of a later extension. * @throws IOException if the path of the given {@code file} is * invalid. * @throws IllegalArgumentException if the extension of the given {@code file} @@ -70,7 +72,9 @@ public FileDataSource(final String filePath, final Iterable possibleExte } private String getValidExtension(final String fileName, final Iterable possibleExtensions) { - final Stream extensionsStream = StreamSupport.stream(possibleExtensions.spliterator(), true); + // use a sequential stream here to avoid a potential race + // condition with extensions that are suffixes of one another. + final Stream extensionsStream = StreamSupport.stream(possibleExtensions.spliterator(), false); final Optional potentialExtension = extensionsStream.filter(fileName::endsWith).findFirst(); if (!potentialExtension.isPresent()) { @@ -127,7 +131,7 @@ public Fact getDeclarationFact() { /** * Returns the name of the predicate that is used to define a declaration of * this data source. 
- * + * * @return */ abstract String getDeclarationPredicateName(); From 9f9c0e06734b6b27bde82e9aa35bbf65bb640a35 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:17:27 +0100 Subject: [PATCH 1098/1255] Migrate CI pipeline --- .github/workflows/test.yml | 17 +++++++++++++++++ .gitignore | 1 + shell.nix | 13 +++++++++++++ vlog/default.nix | 24 +++++++++++++++++++++++ vlog/kognac-lz4.patch | 38 +++++++++++++++++++++++++++++++++++++ vlog/kognac.nix | 32 +++++++++++++++++++++++++++++++ vlog/trident-lz4.patch | 27 ++++++++++++++++++++++++++ vlog/trident.nix | 39 ++++++++++++++++++++++++++++++++++++++ vlog/vlog-lz4.patch | 27 ++++++++++++++++++++++++++ vlog/vlog.nix | 38 +++++++++++++++++++++++++++++++++++++ 10 files changed, 256 insertions(+) create mode 100644 .github/workflows/test.yml create mode 100644 shell.nix create mode 100644 vlog/default.nix create mode 100644 vlog/kognac-lz4.patch create mode 100644 vlog/kognac.nix create mode 100644 vlog/trident-lz4.patch create mode 100644 vlog/trident.nix create mode 100644 vlog/vlog-lz4.patch create mode 100644 vlog/vlog.nix diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..40d8da784 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,17 @@ +name: "Test" +on: + pull_request: + push: +jobs: + tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - uses: cachix/install-nix-action@v12 + with: + nix_path: nixpkgs=channel:nixos-unstable + - uses: cachix/cachix-action@v8 + with: + name: knowsys + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + - run: nix-shell --run "mvn test" diff --git a/.gitignore b/.gitignore index b052b97ca..94a267652 100644 --- a/.gitignore +++ b/.gitignore @@ -56,3 +56,4 @@ rulewerk-rdf/src/main/data/output/* rulewerk-vlog/src/test/data/output/* /build-vlog/vlog/ /TAGS +/vlog/result* diff --git a/shell.nix b/shell.nix new file mode 100644 index 000000000..79e46b9c1 --- /dev/null +++ b/shell.nix @@ 
-0,0 +1,13 @@ +let pkgs = import {}; + + maven = pkgs.maven; + dependencies = import ./vlog; + deps = dependencies.deps // { maven = maven; }; +in pkgs.mkShell { + buildInputsNative = [ maven deps.jdk dependencies.vlog ]; + shellHook = '' + ln -sf ${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar + mvn initialize -Pdevelopment + mvn install -DskipTests + ''; +} diff --git a/vlog/default.nix b/vlog/default.nix new file mode 100644 index 000000000..3b68e8943 --- /dev/null +++ b/vlog/default.nix @@ -0,0 +1,24 @@ +let pkgs = import {}; + + args = { pkgs = pkgs; + lz4 = pkgs.lz4.override { enableStatic = true; }; + git = pkgs.git; + jdk = pkgs.jdk8_headless; + curl = pkgs.curl; + zlib = pkgs.zlib; + cmake = pkgs.cmake; + cacert = pkgs.cacert; + sparsehash = pkgs.sparsehash; + }; + + kognac = import ./kognac.nix args; + trident = import ./trident.nix (args // { inherit kognac; }); + vlog = import ./vlog.nix (args // { inherit kognac; inherit trident; }); + + deps = builtins.removeAttrs args [ "pkgs" ]; +in +{ inherit vlog; + inherit trident; + inherit kognac; + inherit deps; +} diff --git a/vlog/kognac-lz4.patch b/vlog/kognac-lz4.patch new file mode 100644 index 000000000..0bd93cde3 --- /dev/null +++ b/vlog/kognac-lz4.patch @@ -0,0 +1,38 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 3a24e70..3079812 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -97,30 +97,9 @@ ENDIF() + + #LZ4 + # we need it statically included, so download it, not only if it cannot be found! +-# find_library(lz4 lz4) +-# find_path (lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. 
I'm going to download it from the GIT repository ...") +- message("Downloading lz4, static version required") +- ExternalProject_Add(git-lz4 +- DOWNLOAD_COMMAND git clone https://github.com/Cyan4973/lz4.git +- DOWNLOAD_DIR external +- SOURCE_DIR external/lz4/ +- CONFIGURE_COMMAND "" +- BUILD_IN_SOURCE 1 +- BUILD_COMMAND make -C lib lib MOREFLAGS=-fPIC +- INSTALL_COMMAND "" +- ) +- ExternalProject_Get_Property(git-lz4 SOURCE_DIR) +- include_directories(${SOURCE_DIR}/lib/) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${CMAKE_BINARY_DIR}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +- add_dependencies(lz4 git-lz4) +- add_dependencies(kognac-o lz4) +-#ELSE() +-# include_directories(lz4h) +-#message("Found LZ4, lz4=${lz4}") +-#ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path (lz4h lz4.h) ++include_directories(lz4h) + + #standard include + include_directories(include/) diff --git a/vlog/kognac.nix b/vlog/kognac.nix new file mode 100644 index 000000000..b239aca66 --- /dev/null +++ b/vlog/kognac.nix @@ -0,0 +1,32 @@ +{ pkgs, lz4, git, zlib, cmake, cacert, sparsehash, ...}: +pkgs.stdenv.mkDerivation { + name = "kognac-unstable-2020-12-01"; + src = pkgs.fetchgit { + url = "git://github.com/karmaresearch/kognac"; + rev = "8430b081f8d76b11fa6858f3ec31a9ea5a5cf6a9"; + sha256 = "0mhmidbmcwql5h2qjfz3yvfhp79farx5j3cbdpisimk1zmwlzxjf"; + }; + + buildInputs = [ zlib sparsehash lz4 ]; + nativeBuildInputs = [ cmake git cacert ]; + outputs = [ "out" "lib" "dev" ]; + + cmakeFlags = [ "-DCMAKE_CXX_FLAGS=-w" ]; + patches = [ ./kognac-lz4.patch ]; + + installPhase = '' + mkdir -p $out + cp ./kognac_exec $out/ + + mkdir -p $lib + cp ./libkognac-core.so $lib/ + + mkdir -p $dev + cp -R $src/include/kognac/ $dev/ + cp -R $src/include/zstr/ $dev/ + ''; + + postFixup = '' + patchelf --set-rpath $lib:$(patchelf --print-rpath 
$out/kognac_exec) $out/kognac_exec + ''; +} diff --git a/vlog/trident-lz4.patch b/vlog/trident-lz4.patch new file mode 100644 index 000000000..d0b97f10f --- /dev/null +++ b/vlog/trident-lz4.patch @@ -0,0 +1,27 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 439b296..41dfa66 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -121,19 +121,9 @@ IF (${sparsehash} STREQUAL "sparsehash-NOTFOUND") + ENDIF() + + #LZ4 +-# find_library(lz4 lz4) +-# find_path(lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. I'm going to use the version from kognac") +- message("I'm going to use LZ4 the version from kognac") +- include_directories(${KOGNAC_LIB}/external/lz4/lib/) +- LINK_DIRECTORIES(${KOGNAC_LIB}/external/lz4/lib) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${KOGNAC_LIB}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +-# ELSE() +-# include_directories(lz4h) +-# message("-- Found LZ4") +-# ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path(lz4h lz4.h) ++include_directories(lz4h) + + #LZ4 + # find_library(lz4 lz4) diff --git a/vlog/trident.nix b/vlog/trident.nix new file mode 100644 index 000000000..c7c222e51 --- /dev/null +++ b/vlog/trident.nix @@ -0,0 +1,39 @@ +{ pkgs, lz4, git, zlib, cmake, cacert, sparsehash, kognac, ... 
}: +pkgs.stdenv.mkDerivation { + name = "trident-unstable-2021-02-05"; + src = pkgs.fetchgit { + url = "git://github.com/karmaresearch/trident"; + rev = "53630ea83460b5e78851b753f245efaefbcaa57f"; + sha256 = "1irjdzjxzwakgalliry23vcl5iqf0w5bm82wra91mlyqmgirnk2x"; + }; + + buildInputs = [ zlib sparsehash lz4 ]; + nativeBuildInputs = [ cmake git cacert ]; + outputs = [ "out" "lib" "dev" ]; + + cmakeFlags = [ "-DSPARQL=1" + "-DCMAKE_CXX_FLAGS=-w" + "-DKOGNAC_LIB=${kognac.lib}" + "-DKOGNAC_INC=${kognac.dev}" + ]; + patches = [ ./trident-lz4.patch ]; + + installPhase = '' + mkdir -p $out + cp ./trident $out/ + + mkdir -p $lib/ + cp ./libtrident-core.so $lib/ + cp ./libtrident-sparql.so $lib/ + + mkdir -p $dev/ + cp -R $src/include/trident $dev/ + cp -R $src/include/layers $dev/ + cp -R $src/rdf3x/include $dev/ + ''; + + postFixup = '' + patchelf --set-rpath $lib:$(patchelf --print-rpath $out/trident) $out/trident + patchelf --set-rpath $lib:$(patchelf --print-rpath $lib/libtrident-sparql.so) $lib/libtrident-sparql.so + ''; +} diff --git a/vlog/vlog-lz4.patch b/vlog/vlog-lz4.patch new file mode 100644 index 000000000..0a628f5fb --- /dev/null +++ b/vlog/vlog-lz4.patch @@ -0,0 +1,27 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index b72b116..241a32d 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -120,19 +120,9 @@ IF (${sparsehash} STREQUAL "sparsehash-NOTFOUND") + ENDIF() + + #LZ4 +-# find_library(lz4 lz4) +-# find_path(lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. 
I'm going to use the version from kognac") +- message("I'm going to use LZ4 the version from kognac") +- include_directories(${KOGNAC_LIB}/external/lz4/lib/) +- LINK_DIRECTORIES(${KOGNAC_LIB}/external/lz4/lib) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${KOGNAC_LIB}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +-# ELSE() +-# include_directories(lz4h) +-# message("-- Found LZ4") +-# ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path(lz4h lz4.h) ++include_directories(lz4h) + + IF (JAVA) + find_package(Java REQUIRED) diff --git a/vlog/vlog.nix b/vlog/vlog.nix new file mode 100644 index 000000000..1dd186c47 --- /dev/null +++ b/vlog/vlog.nix @@ -0,0 +1,38 @@ +{ pkgs, lz4, git, jdk, curl, zlib, cmake, cacert, sparsehash, kognac, trident, ... }: +pkgs.stdenv.mkDerivation { + name = "vlog"; + src = pkgs.fetchgit { + url = "git://github.com/karmaresearch/vlog"; + rev = "c20fa48fc284b333ce03e63ca3ad97dc51701542"; + sha256 = "0y1zv4bwb84rv09ihc8jc11hxxffrspk8v01s28cv2nymg2306q4"; + }; + + buildInputs = [ kognac trident sparsehash jdk curl lz4 ]; + nativeBuildInputs = [ cmake git cacert ]; + outputs = [ "out" "lib" "dev" ]; + + cmakeFlags = [ "-DJAVA=1" + "-DSPARQL=1" + "-DCMAKE_CXX_FLAGS=-w" + "-DKOGNAC_LIB=${kognac.lib}" + "-DKOGNAC_INC=${kognac.dev}" + "-DTRIDENT_LIB=${trident.lib}" + "-DTRIDENT_INC=${trident.dev}" + ]; + patches = [ ./vlog-lz4.patch ]; + + postInstall = '' + mkdir -p $out + cp ./vlog $out/ + + mkdir -p $lib + cp ./libvlog-core.so $lib/ + + mkdir -p $dev + cp ./jvlog.jar $dev/ + ''; + + postFixup = '' + patchelf --set-rpath $lib:$(patchelf --print-rpath $out/vlog) $out/vlog + ''; +} From 5bc28147fad146ca056cfb5e08b3912d4f881661 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:27:37 +0100 Subject: [PATCH 1099/1255] Ensure target directory for local jvlog exists --- .gitignore | 3 +++ 
shell.nix | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 94a267652..1def40cfa 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,9 @@ nbactions.xml out/ target/ +# local jvlog +/rulewerk-vlog/lib/ + # Don't apply the above to src/ where Java requires # subdirectories named according to package names. # We do not want to forbid things like "dumpfiles" in diff --git a/shell.nix b/shell.nix index 79e46b9c1..831ce632a 100644 --- a/shell.nix +++ b/shell.nix @@ -5,7 +5,8 @@ let pkgs = import {}; deps = dependencies.deps // { maven = maven; }; in pkgs.mkShell { buildInputsNative = [ maven deps.jdk dependencies.vlog ]; - shellHook = '' + shellHook = ''' + mkdir -p rulewerk-vlog/lib/ ln -sf ${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar mvn initialize -Pdevelopment mvn install -DskipTests From 443fd118bece226720d0a2b2692c0dce5c8183c5 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:40:42 +0100 Subject: [PATCH 1100/1255] Update badge & migrate coverage --- .github/workflows/test.yml | 5 ++++- README.md | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 40d8da784..1f2450b58 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -14,4 +14,7 @@ jobs: with: name: knowsys authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - run: nix-shell --run "mvn test" + - run: nix-shell --run "mvn test jacoco:report" + - uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/README.md b/README.md index 4076efe3c..19e7d43f5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ Rulewerk ====== -[![Build Status](https://travis-ci.org/knowsys/rulewerk.png?branch=master)](https://travis-ci.org/knowsys/rulewerk) +[![Build 
Status](https://github.com/knowsys/rulewerk/workflows/test/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:test) [![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From 04b2b48a6867bb5efc4c2efbcbc2dca0cc52a85a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 05:54:35 +0100 Subject: [PATCH 1101/1255] Try to fix coveralls reporting --- .github/workflows/test.yml | 10 ++++++---- shell.nix | 4 ++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1f2450b58..2230a09cb 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -14,7 +14,9 @@ jobs: with: name: knowsys authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - run: nix-shell --run "mvn test jacoco:report" - - uses: coverallsapp/github-action@master - with: - github-token: ${{ secrets.GITHUB_TOKEN }} + - env: + BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} + run: echo "::set-env name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" + - run: | + echo "::set-env name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" + - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" diff --git a/shell.nix b/shell.nix index 831ce632a..959415135 100644 --- a/shell.nix +++ b/shell.nix @@ -8,7 +8,7 @@ in pkgs.mkShell { shellHook = ''' mkdir -p rulewerk-vlog/lib/ ln -sf 
${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar - mvn initialize -Pdevelopment - mvn install -DskipTests + mvn --no-transfer-progress initialize -Pdevelopment + mvn --no-transfer-progress install -DskipTests ''; } From 3a7ac99721722ec2c6749793aeae9a18cce25f57 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:00:55 +0100 Subject: [PATCH 1102/1255] Use ENV files instead of deprecated ::set-env --- .github/workflows/{test.yml => tests.yml} | 7 +++---- README.md | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) rename .github/workflows/{test.yml => tests.yml} (77%) diff --git a/.github/workflows/test.yml b/.github/workflows/tests.yml similarity index 77% rename from .github/workflows/test.yml rename to .github/workflows/tests.yml index 2230a09cb..7dc431f49 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/tests.yml @@ -1,4 +1,4 @@ -name: "Test" +name: "Tests" on: pull_request: push: @@ -16,7 +16,6 @@ jobs: authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - env: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} - run: echo "::set-env name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" - - run: | - echo "::set-env name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" + run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV + - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" diff --git a/README.md b/README.md index 19e7d43f5..736363c96 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ Rulewerk ====== -[![Build 
Status](https://github.com/knowsys/rulewerk/workflows/test/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:test) +[![Build Status](https://github.com/knowsys/rulewerk/workflows/Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Tests) [![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From c7bd8f68068b30ff29f404f1fc2eb00c2e82b000 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:04:58 +0100 Subject: [PATCH 1103/1255] Disable transfer progress for coveralls invocation --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7dc431f49..46a511af6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -18,4 +18,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" + - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn --no-transfer-progress coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D 
serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" From 4635e517a619da6b8e5d725206d2c2d0b4ff0639 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:07:53 +0100 Subject: [PATCH 1104/1255] Be more generous about ignoring generated code wrt. coverage --- pom.xml | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 10123a1c3..1f9d8b879 100644 --- a/pom.xml +++ b/pom.xml @@ -298,6 +298,18 @@ not try to aggregate this into the final coverage report, since we want to control aggregation ourselves. --> ${project.reporting.outputDirectory}/jacoco-ut + + + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError* + @@ -305,14 +317,14 @@ - **/javacc/JavaCCParser.class - **/javacc/JavaCCParserConstants.class - **/javacc/JavaCCParserTokenManager.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError* From e67b722d89e9c06b9ff2d5fdb4e4f7f6f0a2676e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 17 Feb 2021 06:34:48 +0100 Subject: [PATCH 1105/1255] Fix coveralls submission --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 46a511af6..a86878ee9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -18,4 +18,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ 
github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn --no-transfer-progress coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceName=github -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" + - run: nix-shell --run "mvn --no-transfer-progress test jacoco:report && mvn --no-transfer-progress coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER" From 924572b536b85389a5f5762c0a059f894a8e52ed Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 18 Feb 2021 21:20:57 +0100 Subject: [PATCH 1106/1255] Link to CI documentation in README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 736363c96..90b75ce8f 100644 --- a/README.md +++ b/README.md @@ -62,3 +62,4 @@ Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to cr * Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. 
+* The CI setup is [documented here](https://github.com/knowsys/rulewerk/wiki/CI-Setup). \ No newline at end of file From 17a3a1a520ddf41a8c9a522060d3983b802c0bf9 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 15 Mar 2021 18:54:22 +0100 Subject: [PATCH 1107/1255] update README with Matrix Element support room link --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 90b75ce8f..d7caf03e6 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,7 @@ Documentation * The GitHub project **[Rulewerk Example](https://github.com/knowsys/rulewerk-example)** shows how to use Rulewerk in own Maven projects and can be used as a skeleton for own projects * [JavaDoc](https://knowsys.github.io/rulewerk/) is available online and through the Maven packages. * A Rulewerk [Wiki](https://github.com/knowsys/rulewerk/wiki) is available online, with detailed information about rulewerk usage, the supported rule language [examples](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar), and related publications. +* You can contact developers and other users about usage and or development on our [support channel](https://matrix.to/#/#rulewerk-support:tu-dresden.de). Development ----------- @@ -62,4 +63,4 @@ Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to cr * Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * To build the standalone client jar, run `mvn install -Pclient`. 
This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`. -* The CI setup is [documented here](https://github.com/knowsys/rulewerk/wiki/CI-Setup). \ No newline at end of file +* The CI setup is [documented here](https://github.com/knowsys/rulewerk/wiki/CI-Setup). From af27709c9624328eb2321e78f97ce0f2dad4e269 Mon Sep 17 00:00:00 2001 From: "Irina.Dragoste" Date: Thu, 25 Mar 2021 11:46:20 +0100 Subject: [PATCH 1108/1255] correct javadoc for Reasoner#writeInfereces(String) --- .../java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 3f759f573..d94ea7128 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -176,7 +176,7 @@ default Stream getInferences() { * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException * @throws FileNotFoundException - * @deprecated Use {@link KnowledgeBase#writeInferences(Writer)} instead. The + * @deprecated Use {@link Reasoner#writeInferences(Writer)} instead. The * method will disappear. 
*/ @Deprecated From 9a659c90daefb05730bd9cb82df0ccc4a6dcccee Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 09:45:24 +0200 Subject: [PATCH 1109/1255] add dependency to parser; use autoformating --- rulewerk-vlog/pom.xml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index a098e8a17..54c6a8094 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -27,6 +27,11 @@ rulewerk-core ${project.version} + + ${project.groupId} + rulewerk-parser + ${project.version} + ${project.groupId} @@ -38,8 +43,8 @@ development - From 11ea039f2397ebb9bf5d65705b9e9da34d3c900a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 09:46:21 +0200 Subject: [PATCH 1110/1255] add issue61 --- .../reasoner/vlog/issues/Issue61.java | 71 +++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java new file mode 100644 index 000000000..d192b46a1 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java @@ -0,0 +1,71 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +import static org.junit.Assert.assertEquals; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue61 { + + @Test + public void part01() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); + RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + + assertEquals(2,reasoner.countQueryAnswers(query,true).getCount()); + reasoner.close(); + } + + @Test + public void part02() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); + RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + + assertEquals(1,reasoner.countQueryAnswers(query,true).getCount()); + reasoner.close(); + } + +} From 6482f2dbb35d3d7eabb9265d74c23e7cd2570419 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 
09:59:02 +0200 Subject: [PATCH 1111/1255] add issue67 --- .../reasoner/vlog/issues/Issue67.java | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java new file mode 100644 index 000000000..8b72c7cbb --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java @@ -0,0 +1,59 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +import static org.junit.Assert.assertTrue; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue67 { + + @Test + public void part01() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "B1_(a, b, c, d, prov1) ."); + RuleParser.parseInto(kb, "B2_(a, a, c, prov2) . "); + RuleParser.parseInto(kb, "H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) ."); + RuleParser.parseInto(kb, "H2_(n1_3_0, n1_5_0, n1_6_0) ."); + RuleParser.parseInto(kb, "true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + Fact query = RuleParser.parseFact("true(a)."); + + Set inferences = reasoner.getInferences().collect(Collectors.toSet()); + assertTrue(inferences.contains(query)); + reasoner.close(); + } + +} From f4572bc1ba7f73dc7c55f359a60b12f4fe6828c3 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 10:18:38 +0200 Subject: [PATCH 1112/1255] ordering; add issue 63; add @ignore --- .../reasoner/vlog/issues/Issue61.java | 17 ++++--- .../reasoner/vlog/issues/Issue63.java | 48 +++++++++++++++++++ .../reasoner/vlog/issues/Issue67.java | 9 ++-- 3 files changed, 64 insertions(+), 10 deletions(-) create mode 100644 rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java index 
d192b46a1..6165fa13a 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java @@ -24,6 +24,7 @@ import java.io.IOException; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -34,6 +35,7 @@ public class Issue61 { + @Ignore @Test public void part01() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); @@ -41,16 +43,17 @@ public void part01() throws ParsingException, IOException { RuleParser.parseInto(kb, "p(a)."); RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); - + Reasoner reasoner = new VLogReasoner(kb); reasoner.reason(); - + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - - assertEquals(2,reasoner.countQueryAnswers(query,true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + reasoner.close(); } + @Ignore @Test public void part02() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); @@ -61,10 +64,10 @@ public void part02() throws ParsingException, IOException { Reasoner reasoner = new VLogReasoner(kb); reasoner.reason(); - + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - - assertEquals(1,reasoner.countQueryAnswers(query,true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + reasoner.close(); } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java new file mode 100644 index 000000000..1f09ecd89 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java @@ -0,0 +1,48 @@ +package 
org.semanticweb.rulewerk.reasoner.vlog.issues; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue63 { + + @Test(expected = RulewerkRuntimeException.class) + public void test() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X) :- ~p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + reasoner.close(); + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java index 8b72c7cbb..57b0a9c04 100644 --- a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java @@ -26,6 +26,7 @@ import java.util.Set; import 
java.util.stream.Collectors; +import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; @@ -36,6 +37,7 @@ public class Issue67 { + @Ignore @Test public void part01() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); @@ -44,15 +46,16 @@ public void part01() throws ParsingException, IOException { RuleParser.parseInto(kb, "B2_(a, a, c, prov2) . "); RuleParser.parseInto(kb, "H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) ."); RuleParser.parseInto(kb, "H2_(n1_3_0, n1_5_0, n1_6_0) ."); - RuleParser.parseInto(kb, "true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) ."); + RuleParser.parseInto(kb, + "true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) ."); Reasoner reasoner = new VLogReasoner(kb); reasoner.reason(); + Set inferences = reasoner.getInferences().collect(Collectors.toSet()); Fact query = RuleParser.parseFact("true(a)."); - - Set inferences = reasoner.getInferences().collect(Collectors.toSet()); assertTrue(inferences.contains(query)); + reasoner.close(); } From 0af7a662e43532285a6ba3f5a049a1c131b0f00d Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 29 Mar 2021 10:21:57 +0200 Subject: [PATCH 1113/1255] detele extra spaces --- rulewerk-vlog/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 54c6a8094..798211aea 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -43,8 +43,8 @@ development - From d55fbebbd2ac48d371f9334aabb00910d66a65b3 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 30 Mar 2021 13:07:21 +0200 Subject: [PATCH 1114/1255] add integrationtests module; use autoformating --- coverage/pom.xml | 85 +++++++++++++++++++++++++----------------------- pom.xml | 31 ++++++++++-------- 2 files 
changed, 62 insertions(+), 54 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index cd9c75339..83f678c46 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -14,46 +14,51 @@ coverage - - ${project.groupId} - rulewerk-core - ${project.version} - - - ${project.groupId} - rulewerk-vlog - ${project.version} - - - ${project.groupId} - rulewerk-rdf - ${project.version} - - - ${project.groupId} - rulewerk-owlapi - ${project.version} - - - ${project.groupId} - rulewerk-graal - ${project.version} - - - ${project.groupId} - rulewerk-parser - ${project.version} - - - ${project.groupId} - rulewerk-commands - ${project.version} - - - ${project.groupId} - rulewerk-client - ${project.version} - + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-graal + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-commands + ${project.version} + + + ${project.groupId} + rulewerk-client + ${project.version} + + + ${project.groupId} + rulewerk-integrationtests + ${project.version} + diff --git a/pom.xml b/pom.xml index 1f9d8b879..08b77fc58 100644 --- a/pom.xml +++ b/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -13,7 +15,7 @@ https://github.com/knowsys/rulewerk - rulewerk-core rulewerk-vlog @@ -24,8 +26,9 @@ rulewerk-commands rulewerk-examples rulewerk-client + rulewerk-integrationtests coverage - + @@ -131,7 +134,7 @@ org.codehaus.mojo license-maven-plugin 1.14 - + first @@ -155,7 +158,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -174,7 +177,7 @@ - + @@ -189,7 +192,7 @@ - + @@ -231,7 +234,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -294,13 +297,13 @@ test - ${project.reporting.outputDirectory}/jacoco-ut - + **/javacc/JavaCCParser* 
**/javacc/JavaCCParserConstants* **/javacc/JavaCCParserTokenManager* @@ -315,7 +318,7 @@ - **/javacc/JavaCCParser* **/javacc/JavaCCParserConstants* @@ -329,7 +332,7 @@ - org.apache.maven.plugins maven-javadoc-plugin From 8426b5224eb1617e1793542fd1fed81e1f8d11a2 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 30 Mar 2021 13:07:54 +0200 Subject: [PATCH 1115/1255] add vlog issues --- rulewerk-integrationtests/LICENSE.txt | 201 ++++++++++++++++++ rulewerk-integrationtests/pom.xml | 53 +++++ .../vlogissues/VLogIssue.java | 27 +++ .../vlogissues/VLogIssue61.java | 66 ++++++ .../vlogissues/VLogIssue63.java | 45 ++++ .../vlogissues/VLogIssue67.java | 51 +++++ .../vlogissues/VLogIssue69.java | 54 +++++ .../src/test/resources/vlogissues/61-1.rls | 3 + .../src/test/resources/vlogissues/61-2.rls | 3 + .../src/test/resources/vlogissues/63.rls | 3 + .../src/test/resources/vlogissues/67.rls | 5 + .../src/test/resources/vlogissues/69.rls | 6 + 12 files changed, 517 insertions(+) create mode 100644 rulewerk-integrationtests/LICENSE.txt create mode 100644 rulewerk-integrationtests/pom.xml create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/63.rls create 
mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/67.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/69.rls diff --git a/rulewerk-integrationtests/LICENSE.txt b/rulewerk-integrationtests/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-integrationtests/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml new file mode 100644 index 000000000..5bc396a97 --- /dev/null +++ b/rulewerk-integrationtests/pom.xml @@ -0,0 +1,53 @@ + + + 4.0.0 + + org.semanticweb.rulewerk + rulewerk-parent + 0.8.0-SNAPSHOT + + + rulewerk-integrationtests + jar + + Rulewerk Integration Tests + Contains blackbox tests for VLog + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + + + + + + + + org.codehaus.mojo + cobertura-maven-plugin + 2.7 + + true + true + + + + + + \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java new file mode 100644 index 000000000..93f0cc4b9 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -0,0 +1,27 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +class VLogIssue { + + final String RESOURCES = "src/test/resources/vlogissues/";; + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java new file mode 100644 index 000000000..c99f081ff --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java @@ -0,0 +1,66 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue61 extends VLogIssue{ + + @Ignore + @Test + public void test01() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-1.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + + @Test + public void test02() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-2.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java new file mode 100644 index 000000000..422b3d153 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java @@ -0,0 +1,45 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue63 extends VLogIssue { + + @Test(expected = RulewerkRuntimeException.class) + public void test() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "63.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + reasoner.close(); + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java new file mode 100644 index 000000000..73377a732 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java @@ -0,0 +1,51 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue67 extends VLogIssue{ + + @Test + public void test() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "67.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("true(a)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java new file mode 100644 index 000000000..c1c8bd17f --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java @@ -0,0 +1,54 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk 
Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.FileInputStream; +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class VLogIssue69 extends VLogIssue{ + + @Test + public void test() throws ParsingException, IOException { + KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "69.rls")); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); + assertEquals(2, reasoner.countQueryAnswers(query1, true).getCount()); + + PositiveLiteral query2 = RuleParser.parsePositiveLiteral("conc(?X,?Y)"); + assertEquals(4, reasoner.countQueryAnswers(query2, true).getCount()); + + reasoner.close(); + } + +} diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls new file mode 100644 index 000000000..98e034606 --- /dev/null +++ 
b/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls @@ -0,0 +1,3 @@ +p(a) . +q(?X,!Y,!Z) :- p(?X) . +q(?X,!Y,!Y) :- p(?X) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls new file mode 100644 index 000000000..b722059d7 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls @@ -0,0 +1,3 @@ +p(a) . +q(?X,!Y,!Y) :- p(?X) . +q(?X,!Y,!Z) :- p(?X) . diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls new file mode 100644 index 000000000..81832fc8a --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls @@ -0,0 +1,3 @@ +%https://github.com/karmaresearch/vlog/issues/61 +p(a). +q(?X):-~p(?X). \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls new file mode 100644 index 000000000..a0854c7c1 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls @@ -0,0 +1,5 @@ +B1_(a, b, c, d, prov1) . +B2_(a, a, c, prov2). +H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) . +H2_(n1_3_0, n1_5_0, n1_6_0) . +true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls new file mode 100644 index 000000000..9bb347613 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls @@ -0,0 +1,6 @@ +B1_(a, b, c, d, prov1) . +B2_(a, a, c, prov2) . + +H1_(?x1, !z1, !z1, !z2, !F_2), H2_(!z2, !z3, !F_3), H3_(?x2, !F_4), H4_(!z1, !F_5) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1) . 
+inst(?x1, ?x2, ?y1, ?y2, !F_6, rule0) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1) . +prec(?F_0, ?F_6), prec(?F_1, ?F_6), conc(?F_6, ?F_2), conc(?F_6, ?F_3), conc(?F_6, ?F_4), conc(?F_6, ?F_5) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1), H1_(?x1, ?z1, ?z1, ?z2, ?F_2), H2_(?z2, ?z3, ?F_3), H3_(?x2, ?F_4), H4_(?z1, ?F_5), inst(?x1, ?x2, ?y1, ?y2, ?F_6, rule0) . \ No newline at end of file From f08f095ba36ba3197ee5cbb1210ea867ea1d55a1 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 30 Mar 2021 13:14:18 +0200 Subject: [PATCH 1116/1255] remove build section --- rulewerk-integrationtests/pom.xml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index 5bc396a97..83e940746 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -34,20 +34,4 @@ - - - - - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 - - true - true - - - - - \ No newline at end of file From f6bc3a01a0a385d645de1141b5b43f09f075335b Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 9 Apr 2021 09:55:17 +0200 Subject: [PATCH 1117/1255] make VLogIssue class abstract; delete extra semicolon --- .../rulewerk/integrationtests/vlogissues/VLogIssue.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java index 93f0cc4b9..b1153ea19 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -20,8 +20,8 @@ * #L% */ -class VLogIssue { +abstract class VLogIssue { - final String RESOURCES = "src/test/resources/vlogissues/";; + final String 
RESOURCES = "src/test/resources/vlogissues/"; } From b75f783ca7e3ec01f5540b9350b19f6d44a02abf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 20:34:26 +0200 Subject: [PATCH 1118/1255] Refactor integration tests - Use maven-failsafe-plugin to run the integration tests - Exclude integration tests from code coverage, so that coverage reports are generated even when integration tests fail - Provide a convenient wrapper for creating VLogReasoner instances from RLS files --- coverage/pom.xml | 11 +- pom.xml | 34 +- rulewerk-integrationtests/pom.xml | 4 +- .../integrationtests/IntegrationTest.java | 77 +++ .../vlogissues/RulewerkIssue175IT.java | 35 ++ .../vlogissues/VLogIssue.java | 44 +- .../vlogissues/VLogIssue61.java | 66 --- .../{VLogIssue63.java => VLogIssue61IT.java} | 35 +- .../vlogissues/VLogIssue63IT.java | 19 + .../{VLogIssue67.java => VLogIssue67IT.java} | 23 +- .../vlogissues/VLogIssue69.java | 27 +- .../vlogissues/rulewerk/175-minimal.rls | 4 + .../resources/vlogissues/rulewerk/175.rls | 464 ++++++++++++++++++ .../resources/vlogissues/{ => vlog}/61-1.rls | 0 .../resources/vlogissues/{ => vlog}/61-2.rls | 0 .../resources/vlogissues/{ => vlog}/63.rls | 0 .../resources/vlogissues/{ => vlog}/67.rls | 0 .../resources/vlogissues/{ => vlog}/69.rls | 0 18 files changed, 691 insertions(+), 152 deletions(-) create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java create mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java delete mode 100644 rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java rename rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/{VLogIssue63.java => VLogIssue61IT.java} (53%) create mode 100644 
rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java rename rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/{VLogIssue67.java => VLogIssue67IT.java} (68%) create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls create mode 100644 rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/61-1.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/61-2.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/63.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/67.rls (100%) rename rulewerk-integrationtests/src/test/resources/vlogissues/{ => vlog}/69.rls (100%) diff --git a/coverage/pom.xml b/coverage/pom.xml index 83f678c46..be85d779e 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -54,11 +54,12 @@ rulewerk-client ${project.version} - - ${project.groupId} - rulewerk-integrationtests - ${project.version} - + + + + + + diff --git a/pom.xml b/pom.xml index 08b77fc58..7ae152180 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ https://github.com/knowsys/rulewerk - rulewerk-core rulewerk-vlog @@ -79,7 +79,7 @@ UTF-8 - 4.13.1 + 4.13.2 2.28.2 1.7.28 3.9 @@ -158,7 +158,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -216,7 +216,6 @@ - @@ -234,7 +233,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -250,13 +249,26 @@ org.apache.maven.plugins maven-surefire-plugin - 3.0.0-M4 + 3.0.0-M5 ${surefireArgLine} 1 true + + org.apache.maven.plugins + maven-failsafe-plugin + 3.0.0-M5 + + + + integration-test + verify + + + + org.eluder.coveralls @@ -297,12 +309,12 @@ test - ${project.reporting.outputDirectory}/jacoco-ut - **/javacc/JavaCCParser* **/javacc/JavaCCParserConstants* @@ -318,7 +330,7 @@ - 
**/javacc/JavaCCParser* **/javacc/JavaCCParserConstants* @@ -332,7 +344,7 @@ - org.apache.maven.plugins maven-javadoc-plugin diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index 83e940746..91bd919e3 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -31,7 +31,5 @@ rulewerk-vlog ${project.version} - - - \ No newline at end of file + diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java new file mode 100644 index 000000000..3bf14ab0b --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java @@ -0,0 +1,77 @@ +package org.semanticweb.rulewerk.integrationtests; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.InputStream; + +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public abstract class IntegrationTest { + /** + * Returns the prefix to use for resource names + * + * @return the prefix to use when turning resource names into paths + * + * This needs to be overriden in subpackages for loading to work + * correctly. + */ + protected String getResourcePrefix() { + return "/"; + } + + /** + * Obtain an input stream for a resource name + * + * @param resourceName the resource name to load + * @return an {@link InputStream} pointing to the resource + */ + protected InputStream getResourceAsStream(String resourceName) { + String prefix = getResourcePrefix(); + + if (resourceName.startsWith(prefix)) { + prefix = ""; + } else if (resourceName.startsWith("/") && prefix.endsWith("/")) { + prefix = prefix.substring(0, prefix.length() - 1); + } + + return this.getClass().getResourceAsStream(prefix + resourceName); + } + + /** + * Load a Knowledge Base from a resource name + * + * @param resourceName the name of the resource to parse into a Knowledge Base + * + * @throws ParsingException when there is an error during parsing + * + * @return a {@link KnowledgeBase} containing the parsed contents of the named + * resource + */ + protected KnowledgeBase parseKbFromResource(String resourceName) throws ParsingException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, getResourceAsStream(resourceName)); + + return kb; + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java new file mode 100644 index 000000000..b6e38d056 --- /dev/null +++ 
b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java @@ -0,0 +1,35 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RulewerkIssue175IT extends VLogIssue { + @Test + public void issue175_full_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("rulewerk/175.rls")) { + reasoner.reason(); + try (QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral("VANDALISMRESERVEDENTITIESSUPPREL0", + Expressions.makeAbstractConstant("VANDALISMRESERVEDENTITIESSUPPRULE50")), false)) { + assertTrue(result.hasNext()); + } + } + } + + @Test + public void issue175_minimal_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("rulewerk/175-minimal.rls")) { + reasoner.reason(); + try (QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral("VANDALISMRESERVEDENTITIESSUPPREL0", + Expressions.makeAbstractConstant("VANDALISMRESERVEDENTITIESSUPPRULE50")), false)) { + assertTrue(result.hasNext()); + } + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java index b1153ea19..d46941a7e 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java +++ 
b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -1,27 +1,27 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; -/*- - * #%L - * Rulewerk Integration Tests - * %% - * Copyright (C) 2018 - 2021 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.integrationtests.IntegrationTest; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -abstract class VLogIssue { - - final String RESOURCES = "src/test/resources/vlogissues/"; +abstract class VLogIssue extends IntegrationTest { + @Override + protected String getResourcePrefix() { + return "/vlogissues/"; + } + /** + * Obtain a reasoner loaded with the Knowledge Base read from the resource name + * + * @param resourceName the name of the resource to load into the Reasoner + * + * @throws ParsingException when there is an error during parsing + * + * @return a {@link VLogReasoner} containing the parsed contents of the named + * resource + */ + protected Reasoner getReasonerWithKbFromResource(String resourceName) throws ParsingException { + return new VLogReasoner(parseKbFromResource(resourceName)); + } } diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java 
b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java deleted file mode 100644 index c99f081ff..000000000 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61.java +++ /dev/null @@ -1,66 +0,0 @@ -package org.semanticweb.rulewerk.integrationtests.vlogissues; - -/*- - * #%L - * Rulewerk Integration Tests - * %% - * Copyright (C) 2018 - 2021 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; - -import java.io.FileInputStream; -import java.io.IOException; - -import org.junit.Ignore; -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; - -public class VLogIssue61 extends VLogIssue{ - - @Ignore - @Test - public void test01() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-1.rls")); - - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); - - PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); - - reasoner.close(); - } - - @Test - public void test02() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "61-2.rls")); - - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); - - PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); - - reasoner.close(); - } - -} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java similarity index 53% rename from rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java rename to rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java index 422b3d153..960e02279 100644 --- 
a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,26 +20,35 @@ * #L% */ -import java.io.FileInputStream; +import static org.junit.Assert.assertEquals; + import java.io.IOException; import org.junit.Test; -import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class VLogIssue63 extends VLogIssue { +public class VLogIssue61IT extends VLogIssue { + + @Test + public void ruleset01_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/61-1.rls")) { + reasoner.reason(); - @Test(expected = RulewerkRuntimeException.class) - public void test() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "63.rls")); + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + } + } - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); + @Test + public void 
ruleset02_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/61-2.rls")) { + reasoner.reason(); - reasoner.close(); + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + } } } diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java new file mode 100644 index 000000000..9412c78d4 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java @@ -0,0 +1,19 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +import java.io.IOException; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class VLogIssue63IT extends VLogIssue { + + @Ignore + @Test + public void test() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/63.rls")) { + reasoner.reason(); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java similarity index 68% rename from rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java rename to rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java index 73377a732..ed9daa56f 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67.java +++ 
b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,30 +22,23 @@ import static org.junit.Assert.assertEquals; -import java.io.FileInputStream; import java.io.IOException; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class VLogIssue67 extends VLogIssue{ +public class VLogIssue67IT extends VLogIssue { @Test public void test() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "67.rls")); + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { + reasoner.reason(); - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); - - PositiveLiteral query = RuleParser.parsePositiveLiteral("true(a)"); - assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); - - reasoner.close(); + PositiveLiteral query = RuleParser.parsePositiveLiteral("true(a)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + } } - } diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java 
b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java index c1c8bd17f..73f7cddd8 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,33 +22,26 @@ import static org.junit.Assert.assertEquals; -import java.io.FileInputStream; import java.io.IOException; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; -public class VLogIssue69 extends VLogIssue{ +public class VLogIssue69 extends VLogIssue { @Test public void test() throws ParsingException, IOException { - KnowledgeBase kb = RuleParser.parse(new FileInputStream(RESOURCES + "69.rls")); + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { + reasoner.reason(); - Reasoner reasoner = new VLogReasoner(kb); - reasoner.reason(); + PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); + assertEquals(2, reasoner.countQueryAnswers(query1, true).getCount()); - PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); - assertEquals(2, 
reasoner.countQueryAnswers(query1, true).getCount()); - - PositiveLiteral query2 = RuleParser.parsePositiveLiteral("conc(?X,?Y)"); - assertEquals(4, reasoner.countQueryAnswers(query2, true).getCount()); - - reasoner.close(); + PositiveLiteral query2 = RuleParser.parsePositiveLiteral("conc(?X,?Y)"); + assertEquals(4, reasoner.countQueryAnswers(query2, true).getCount()); + } } - } diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls new file mode 100644 index 000000000..0013de9db --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls @@ -0,0 +1,4 @@ +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE50) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . 
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls new file mode 100644 index 000000000..a78e9911e --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls @@ -0,0 +1,464 @@ +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . 
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . 
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE155) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE6) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE189) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE50) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE58) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE2) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE35) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE44) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE176) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE53) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE54) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE55) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE56) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE159) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE14) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE110) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE113) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE21) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE22) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE89) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE23) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE26) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE28) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE67) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE64) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE127) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE154) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE174) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE38) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE74) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE133) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE82) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE85) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE150) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE181) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE175) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE81) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE1) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE90) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE184) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE43) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE92) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE19) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE0) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE188) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE129) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE9) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE96) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE46) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE190) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE97) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE3) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE191) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE99) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE100) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE49) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE4) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0, VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE5) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE193) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE194) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE421) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE423) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE424) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE347) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE348) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE353) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE354) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE287) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE288) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE359) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE360) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE361) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE362) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE363) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE364) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE439) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE440) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE445) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE446) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE443) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE211) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE212) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE215) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE216) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE217) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE218) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE297) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE298) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE295) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE296) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE455) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE456) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE377) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE378) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE305) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE306) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE231) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE232) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE233) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE234) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE237) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE238) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE313) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE314) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE321) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE322) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE391) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE392) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE323) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE324) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE397) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE398) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE477) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE401) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE402) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE255) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE256) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE253) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE254) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE483) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE484) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE409) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE489) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE333) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE334) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE261) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE262) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE265) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE266) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE335) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE336) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE273) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE274) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0, VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-1.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/61-1.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-1.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-2.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/61-2.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-2.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/63.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/63.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/63.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/63.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/67.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/67.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/67.rls rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/67.rls diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/69.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/69.rls similarity index 100% rename from rulewerk-integrationtests/src/test/resources/vlogissues/69.rls 
rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/69.rls From 36a77732a5c74cdcaaacbb9a40f74c4dd683ea86 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 20:49:39 +0200 Subject: [PATCH 1119/1255] Bump VLog & Trident for CI --- vlog/trident.nix | 6 +++--- vlog/vlog.nix | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/vlog/trident.nix b/vlog/trident.nix index c7c222e51..df418aa77 100644 --- a/vlog/trident.nix +++ b/vlog/trident.nix @@ -1,10 +1,10 @@ { pkgs, lz4, git, zlib, cmake, cacert, sparsehash, kognac, ... }: pkgs.stdenv.mkDerivation { - name = "trident-unstable-2021-02-05"; + name = "trident-unstable-2021-04-01"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/trident"; - rev = "53630ea83460b5e78851b753f245efaefbcaa57f"; - sha256 = "1irjdzjxzwakgalliry23vcl5iqf0w5bm82wra91mlyqmgirnk2x"; + rev = "087e90509434f84e927251c0aa8f1dd91dbb64c7"; + sha256 = "01qw93b0hvvr7vgk24d550mvrcj7lb5chrkh0y68x1mr01di2a87"; }; buildInputs = [ zlib sparsehash lz4 ]; diff --git a/vlog/vlog.nix b/vlog/vlog.nix index 1dd186c47..e74eea379 100644 --- a/vlog/vlog.nix +++ b/vlog/vlog.nix @@ -3,8 +3,8 @@ pkgs.stdenv.mkDerivation { name = "vlog"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/vlog"; - rev = "c20fa48fc284b333ce03e63ca3ad97dc51701542"; - sha256 = "0y1zv4bwb84rv09ihc8jc11hxxffrspk8v01s28cv2nymg2306q4"; + rev = "7356ed98db064ee30300950441716545b819f3a1"; + sha256 = "127jykvgvikyv8nw4ih73qs6cin6ck5bfc0p53svv7hh9zn7vaj2"; }; buildInputs = [ kognac trident sparsehash jdk curl lz4 ]; From c5239878b7aa90bb5bba44d2c833c01902ab7a5a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:03:16 +0200 Subject: [PATCH 1120/1255] Update license headers --- .../vlogissues/RulewerkIssue175IT.java | 20 ++++++++++++++++++ .../vlogissues/VLogIssue.java | 20 ++++++++++++++++++ .../vlogissues/VLogIssue63IT.java | 21 ++++++++++++++++++- 3 files changed, 60 
insertions(+), 1 deletion(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java index b6e38d056..cda83279e 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.assertTrue; import java.io.IOException; diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java index d46941a7e..7e304969b 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.integrationtests.IntegrationTest; import org.semanticweb.rulewerk.parser.ParsingException; diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java index 9412c78d4..8016ed069 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.integrationtests.vlogissues; +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.io.IOException; import org.junit.Ignore; @@ -9,7 +29,6 @@ public class VLogIssue63IT extends VLogIssue { - @Ignore @Test public void test() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/63.rls")) { From ef91059abac7780ba9fd52b71c86232d99cb9a52 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:03:22 +0200 Subject: [PATCH 1121/1255] Fix typo in nix shell hook --- shell.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shell.nix b/shell.nix index 959415135..9d8842c07 100644 --- a/shell.nix +++ b/shell.nix @@ -5,7 +5,7 @@ let pkgs = import {}; deps = dependencies.deps // { maven = maven; }; in pkgs.mkShell { buildInputsNative = [ maven deps.jdk dependencies.vlog ]; - shellHook = ''' + shellHook = '' mkdir -p rulewerk-vlog/lib/ ln -sf ${dependencies.vlog.dev}/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar mvn --no-transfer-progress initialize -Pdevelopment From c3c6061acbaee27daefbf18eaa5c50fb2093344f Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:05:40 +0200 Subject: [PATCH 1122/1255] Fix test case for VLog issue #63 --- .../rulewerk/integrationtests/vlogissues/VLogIssue63IT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java index 8016ed069..f289764b4 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java @@ -22,14 +22,14 @@ import java.io.IOException; -import org.junit.Ignore; import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import 
org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; public class VLogIssue63IT extends VLogIssue { - @Test + @Test(expected = RulewerkRuntimeException.class) public void test() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/63.rls")) { reasoner.reason(); From 99f64ebb38c7495c9e77d04bcaef98c76b5de207 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:06:38 +0200 Subject: [PATCH 1123/1255] Fix test case for VLog issue #61 --- .../rulewerk/integrationtests/vlogissues/VLogIssue61IT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java index 960e02279..a79b34c4a 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java @@ -48,7 +48,7 @@ public void ruleset02_succeeds() throws ParsingException, IOException { reasoner.reason(); PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); } } } From 1138d03d8290c61a15c153efd761b90d96b6a87b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:09:38 +0200 Subject: [PATCH 1124/1255] Add workflow for integration tests --- .github/workflows/integration-tests.yml | 21 +++++++++++++++++++ .../workflows/{tests.yml => unit-tests.yml} | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/integration-tests.yml rename .github/workflows/{tests.yml => unit-tests.yml} (97%) 
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml new file mode 100644 index 000000000..7aaf7f98a --- /dev/null +++ b/.github/workflows/integration-tests.yml @@ -0,0 +1,21 @@ +name: "Rulewerk Integration Tests" +on: + pull_request: + push: +jobs: + tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - uses: cachix/install-nix-action@v12 + with: + nix_path: nixpkgs=channel:nixos-unstable + - uses: cachix/cachix-action@v8 + with: + name: knowsys + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + - env: + BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} + run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV + - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV + - run: nix-shell --run "mvn --no-transfer-progress failsafe:integration-test" diff --git a/.github/workflows/tests.yml b/.github/workflows/unit-tests.yml similarity index 97% rename from .github/workflows/tests.yml rename to .github/workflows/unit-tests.yml index a86878ee9..5cb3cc663 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/unit-tests.yml @@ -1,4 +1,4 @@ -name: "Tests" +name: "Rulewerk Unit Tests" on: pull_request: push: From f411cb8514c59698c38753d984576bf49365cb01 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:14:57 +0200 Subject: [PATCH 1125/1255] Run integration tests only after completion of unit tests --- .github/workflows/integration-tests.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 7aaf7f98a..4f34ae373 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -1,7 +1,9 @@ name: "Rulewerk Integration Tests" on: - pull_request: - push: + workflow_run: + workflows: ["Rulewerk Unit Tests"] + types: + - completed jobs: tests: runs-on: 
ubuntu-latest From 4a2d9ed1051b3f1250579bc8594f53eb82f5dddf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:37:32 +0200 Subject: [PATCH 1126/1255] Update test badges in README --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d7caf03e6..e8f6208d5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ Rulewerk ====== -[![Build Status](https://github.com/knowsys/rulewerk/workflows/Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Tests) +[![Build Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Unit+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Unit+Tests) +[![Integration Test Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Integration+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Integration+Tests) [![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From 89ace929324f9b527c56082c352af54f91f82cae Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 16 Apr 2021 21:38:55 +0200 Subject: [PATCH 1127/1255] Fix badges --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e8f6208d5..5f6b233a4 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Rulewerk ====== -[![Build Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Unit+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Unit+Tests) 
-[![Integration Test Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk+Integration+Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Integration+Tests) +[![Build Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk%20Unit%20Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Unit+Tests) +[![Integration Test Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk%20Integration%20Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Integration+Tests) [![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) From 5a792995129fdbcf9f3a7a6f8b5a5cddb0079180 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 17:38:25 +0200 Subject: [PATCH 1128/1255] Use vlog-1.35 for CI --- .envrc | 1 + vlog/kognac.nix | 3 ++- vlog/trident.nix | 7 ++++--- vlog/vlog.nix | 5 +++-- 4 files changed, 10 insertions(+), 6 deletions(-) create mode 100644 .envrc diff --git a/.envrc b/.envrc new file mode 100644 index 000000000..1d953f4bd --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use nix diff --git a/vlog/kognac.nix b/vlog/kognac.nix index b239aca66..9faa9400e 100644 --- a/vlog/kognac.nix +++ b/vlog/kognac.nix @@ -1,6 +1,7 @@ { pkgs, lz4, git, zlib, cmake, cacert, sparsehash, ...}: pkgs.stdenv.mkDerivation { - name = "kognac-unstable-2020-12-01"; + name = "kognac-unstable"; + version = "2020-12-01"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/kognac"; rev = "8430b081f8d76b11fa6858f3ec31a9ea5a5cf6a9"; diff --git 
a/vlog/trident.nix b/vlog/trident.nix index df418aa77..b6b0b8a1b 100644 --- a/vlog/trident.nix +++ b/vlog/trident.nix @@ -1,10 +1,11 @@ { pkgs, lz4, git, zlib, cmake, cacert, sparsehash, kognac, ... }: pkgs.stdenv.mkDerivation { - name = "trident-unstable-2021-04-01"; + name = "trident-unstable"; + version = "2021-05-18"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/trident"; - rev = "087e90509434f84e927251c0aa8f1dd91dbb64c7"; - sha256 = "01qw93b0hvvr7vgk24d550mvrcj7lb5chrkh0y68x1mr01di2a87"; + rev = "c24179a17fac7d3ec8214aff9b97b41b21e981b4"; + sha256 = "0bi0366ngk162xjll1cxys6hfynw2xksz1yr7l6hdsx0bx9qvrw4"; }; buildInputs = [ zlib sparsehash lz4 ]; diff --git a/vlog/vlog.nix b/vlog/vlog.nix index e74eea379..761784481 100644 --- a/vlog/vlog.nix +++ b/vlog/vlog.nix @@ -1,10 +1,11 @@ { pkgs, lz4, git, jdk, curl, zlib, cmake, cacert, sparsehash, kognac, trident, ... }: pkgs.stdenv.mkDerivation { name = "vlog"; + version = "1.35"; src = pkgs.fetchgit { url = "git://github.com/karmaresearch/vlog"; - rev = "7356ed98db064ee30300950441716545b819f3a1"; - sha256 = "127jykvgvikyv8nw4ih73qs6cin6ck5bfc0p53svv7hh9zn7vaj2"; + rev = "ca0669424963765d08a63a29a0d89e27cf33ef51"; + sha256 = "10xkc8qfarz3garn2x88p064mx109vqayiijk6zslhmn4r7j465k"; }; buildInputs = [ kognac trident sparsehash jdk curl lz4 ]; From 1a681108ffb78abcdc12fd423c630a013ac48ca6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 17:56:20 +0200 Subject: [PATCH 1129/1255] CI: properly fail workflow when integration tests fail --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 4f34ae373..5feda97d4 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -20,4 +20,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo 
"name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress failsafe:integration-test" + - run: nix-shell --run "mvn --no-transfer-progress failsafe:verify" From c74d93d6056292df1f9e5a9f750b7e46139b80dc Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:13:17 +0200 Subject: [PATCH 1130/1255] CI: run propery verify phase for integration tests, skip unit tests --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 5feda97d4..7f94af9dc 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -20,4 +20,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo "name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress failsafe:verify" + - run: nix-shell --run "mvn --no-transfer-progress -Dtest=!*Test -DfailIfNoTests=false verify" From f1205099b50006909b513848bf463d80151f7003 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:20:08 +0200 Subject: [PATCH 1131/1255] CI: just run "mvn verify" for integration tests --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 7f94af9dc..09a4043ce 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -20,4 +20,4 @@ jobs: BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }} run: echo "name=BRANCH_NAME::${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV - run: echo 
"name=PR_NUMBER::$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV - - run: nix-shell --run "mvn --no-transfer-progress -Dtest=!*Test -DfailIfNoTests=false verify" + - run: nix-shell --run "mvn --no-transfer-progress verify" From 17e9a1597b9172e61fee05f58eb863a56144748d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:22:12 +0200 Subject: [PATCH 1132/1255] Fix naming of integration test for vlog issue 69 --- .../vlogissues/{VLogIssue69.java => VLogIssue69IT.java} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/{VLogIssue69.java => VLogIssue69IT.java} (92%) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java similarity index 92% rename from rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java rename to rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java index 73f7cddd8..76c897cff 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java @@ -30,10 +30,10 @@ import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; -public class VLogIssue69 extends VLogIssue { +public class VLogIssue69IT extends VLogIssue { @Test - public void test() throws ParsingException, IOException { + public void ruleset_succeeds() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { reasoner.reason(); From f65b1f06283d3d892a62dc64b1f9a2ccf0552058 
Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 18:22:35 +0200 Subject: [PATCH 1133/1255] Fix integration test for vlog issue 69 --- .../rulewerk/integrationtests/vlogissues/VLogIssue69IT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java index 76c897cff..1ada7cc7e 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java @@ -34,7 +34,7 @@ public class VLogIssue69IT extends VLogIssue { @Test public void ruleset_succeeds() throws ParsingException, IOException { - try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/69.rls")) { reasoner.reason(); PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); From 6edcd6e62ce46e88e51ff7e18215a5039d1a82a2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 20 May 2021 19:09:24 +0200 Subject: [PATCH 1134/1255] Fix integration test for VLog issue 61 --- .../vlogissues/VLogIssue61IT.java | 24 ++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java index a79b34c4a..c5f736ef5 100644 --- a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java @@ -20,25 
+20,43 @@ * #L% */ -import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.util.List; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.parser.ParsingException; import org.semanticweb.rulewerk.parser.RuleParser; public class VLogIssue61IT extends VLogIssue { + boolean hasCorrectAnswers(QueryResultIterator answers) { + int numAnswers = 0; + boolean hasEqualNullsAnswer = false; + + while (answers.hasNext()) { + ++numAnswers; + + List terms = answers.next().getTerms(); + hasEqualNullsAnswer = hasEqualNullsAnswer || (terms.get(1).equals(terms.get(2))); + } + + return hasEqualNullsAnswer && numAnswers <= 2; + } + @Test public void ruleset01_succeeds() throws ParsingException, IOException { try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/61-1.rls")) { reasoner.reason(); PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + assertTrue(hasCorrectAnswers(reasoner.answerQuery(query, true))); } } @@ -48,7 +66,7 @@ public void ruleset02_succeeds() throws ParsingException, IOException { reasoner.reason(); PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); - assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + assertTrue(hasCorrectAnswers(reasoner.answerQuery(query, true))); } } } From f1850727c9371e7b74b4abc98c903bcfa72cc4e6 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:35:39 +0200 Subject: [PATCH 1135/1255] removed unused field variable --- .../rulewerk/parser/RuleParserTest.java | 357 +++++++++--------- 1 file changed, 179 
insertions(+), 178 deletions(-) diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index 797d6c59c..0e62bdb79 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -19,8 +19,10 @@ * limitations under the License. * #L% */ -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; import java.util.ArrayList; import java.util.Arrays; @@ -54,213 +56,212 @@ public class RuleParserTest implements ParserTestUtils { private final Constant e = Expressions.makeAbstractConstant("https://example.org/e"); private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); private final Constant xyz = Expressions.makeDatatypeConstant("xyz", PrefixDeclarationRegistry.XSD_STRING); - private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); - private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); - private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); - private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); - private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); - private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); - private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", abc); - private final PositiveLiteral fact4 = 
Expressions.makePositiveLiteral("https://example.org/s", e); - private final PositiveLiteral fact5 = Expressions.makePositiveLiteral("q", xyz); - private final PositiveLiteral fact6 = Expressions.makePositiveLiteral("http://example.org/p", abc); - private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); - private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); - private final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); - private final Rule rule1 = Expressions.makeRule(head, body1); - private final Rule rule2 = Expressions.makeRule(head, body2); + private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", this.x, this.c); + private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", this.x, this.c); + private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", this.x, this.z); + private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", this.x, this.y); + private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", this.x, this.d); + private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", this.c); + private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", this.abc); + private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", this.abc); + private final PositiveLiteral fact4 = Expressions.makePositiveLiteral("https://example.org/s", this.e); + private final PositiveLiteral fact5 = Expressions.makePositiveLiteral("q", this.xyz); + private final Conjunction body1 = Expressions.makeConjunction(this.atom1, this.atom2); + private final Conjunction body2 = Expressions.makeConjunction(this.negAtom1, this.atom2); + private final Conjunction head = Expressions.makePositiveConjunction(this.atom3, this.atom4); + private final Rule rule1 = Expressions.makeRule(this.head, 
this.body1); + private final Rule rule2 = Expressions.makeRule(this.head, this.body2); @Test public void testExplicitIri() throws ParsingException { - String input = "() ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "() ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testPrefixResolution() throws ParsingException { - String input = "@prefix ex: . ex:s(ex:c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@prefix ex: . ex:s(ex:c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testBaseRelativeResolution() throws ParsingException { - String input = "@base . () ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@base . () ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testBaseResolution() throws ParsingException { - String input = "@base . s(c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@base . 
s(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testNoBaseRelativeIri() throws ParsingException { - String input = "s(c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeAbstractConstant("c")); + final String input = "s(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeAbstractConstant("c")); assertEquals(Arrays.asList(atom), statements); } @Test(expected = ParsingException.class) public void testPrefixConflict() throws ParsingException { - String input = "@prefix ex: . @prefix ex: . s(c) ."; + final String input = "@prefix ex: . @prefix ex: . s(c) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testBaseConflict() throws ParsingException { - String input = "@base . @base . s(c) ."; + final String input = "@base . @base . s(c) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testMissingPrefix() throws ParsingException { - String input = "ex:s(c) ."; + final String input = "ex:s(c) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoUniversalLiterals() throws ParsingException { - String input = "p(?X) ."; + final String input = "p(?X) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoExistentialLiterals() throws ParsingException { - String input = "p(!X) ."; + final String input = "p(!X) ."; RuleParser.parse(input); } @Test public void testSimpleRule() throws ParsingException { - String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . 
"; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(rule1), statements); + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule1), statements); } @Test public void testFactWithCommentSymbol() throws ParsingException { - String input = "t(\"%test\") . "; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final String input = "t(\"%test\") . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(Expressions.makeFact("t", Expressions.makeDatatypeConstant("%test", PrefixDeclarationRegistry.XSD_STRING))), statements); } @Test public void testNegationRule() throws ParsingException { - String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(rule2), statements); + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule2), statements); } @Test(expected = ParsingException.class) public void testUnsafeNegationRule() throws ParsingException { - String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . "; + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . "; RuleParser.parse(input); } @Test public void testWhiteSpace() throws ParsingException { - String input = "@base \n\n . " + final String input = "@base \n\n . " + " q(?X, !Y) , r(?X, d\t ) \n\n:- p(?X,c), p(?X,\n?Z) \n. 
"; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(rule1), statements); + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule1), statements); } @Test(expected = ParsingException.class) public void testNoUnsafeVariables() throws ParsingException { - String input = "p(?X,?Y) :- q(?X) ."; + final String input = "p(?X,?Y) :- q(?X) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoConflictingQuantificationVariables() throws ParsingException { - String input = "p(?X,!X) :- q(?X) ."; + final String input = "p(?X,!X) :- q(?X) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoBodyExistential() throws ParsingException { - String input = "p(?X) :- q(?X,!Y) ."; + final String input = "p(?X) :- q(?X,!Y) ."; RuleParser.parse(input); } @Test(expected = ParsingException.class) public void testNoDollarVariables() throws ParsingException { - String input = "p($X) :- q($X) ."; + final String input = "p($X) :- q($X) ."; RuleParser.parse(input); } @Test public void testIntegerLiteral() throws ParsingException { - String input = "p(42)"; - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(42)"; + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testAbbreviatedIntegerLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . " + "p(\"42\"^^xsd:integer) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + final String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(Arrays.asList(integerLiteral), statements); } @Test public void testFullIntegerLiteral() throws ParsingException { - String input = "p(\"42\"^^<" + PrefixDeclarationRegistry.XSD_INTEGER + "> )"; - PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(\"42\"^^<" + PrefixDeclarationRegistry.XSD_INTEGER + "> )"; + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testDecimalLiteral() throws ParsingException { - String input = "p(-5.0)"; - PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(-5.0)"; + final PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("-5.0", PrefixDeclarationRegistry.XSD_DECIMAL)); assertEquals(decimalLiteral, RuleParser.parseLiteral(input)); } @Test public void testDoubleLiteral() throws ParsingException { - String input = "p(4.2E9)"; - PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", + final String input = "p(4.2E9)"; + final PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarationRegistry.XSD_DOUBLE)); assertEquals(doubleLiteral, RuleParser.parseLiteral(input)); } @Test public void testStringLiteral() throws ParsingException { - String input = "p(\"abc\")"; - assertEquals(fact2, RuleParser.parseLiteral(input)); + final String input = "p(\"abc\")"; + assertEquals(this.fact2, RuleParser.parseLiteral(input)); } @Test(expected = 
ParsingException.class) public void testIncompleteStringLiteral() throws ParsingException { - String input = "p(\"abc)"; + final String input = "p(\"abc)"; RuleParser.parseLiteral(input); } @Test public void parseLiteral_escapeSequences_succeeds() throws ParsingException { - String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @@ -268,159 +269,159 @@ public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { @Test public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") - String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", 
Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) public void parseLiteral_invalidEscapeSequence_throws() throws ParsingException { - String input = "p(\"\\ÿ\")"; + final String input = "p(\"\\ÿ\")"; RuleParser.parseLiteral(input); } @Test(expected = ParsingException.class) public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingException { - String input = "p(\"\\\")"; + final String input = "p(\"\\\")"; RuleParser.parseLiteral(input); } @Test public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { - String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.toString())); } @Test(expected = ParsingException.class) public void testIncompleteStringLiteralMultiLine() throws ParsingException { - String input = "p('''abc\ndef'')"; + final String input = "p('''abc\ndef'')"; RuleParser.parseLiteral(input); } @Test public void testFullLiteral() throws ParsingException { - String input = "p(\"abc\"^^)"; - assertEquals(fact2, RuleParser.parseLiteral(input)); + final String input = "p(\"abc\"^^)"; + 
assertEquals(this.fact2, RuleParser.parseLiteral(input)); } @Test public void testUnicodeLiteral() throws ParsingException { - String input = "p(\"\\u0061\\u0062\\u0063\")"; // "abc" - assertEquals(fact2, RuleParser.parseLiteral(input)); + final String input = "p(\"\\u0061\\u0062\\u0063\")"; // "abc" + assertEquals(this.fact2, RuleParser.parseLiteral(input)); } @Test public void testUnicodeUri() throws ParsingException { - String input = "@base . @prefix ex: . ex:\\u0073(c) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final String input = "@base . @prefix ex: . ex:\\u0073(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testPrefixedLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact2), statements); + final String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact2), statements); } @Test public void testLangStringLiteral() throws ParsingException { - String input = "p(\"abc\"@en-gb)"; - PositiveLiteral fact = Expressions.makePositiveLiteral("p", + final String input = "p(\"abc\"@en-gb)"; + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeLanguageStringConstant("abc", "en-gb")); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void testLineComments() throws ParsingException { - String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + final String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + " ex:s(ex:c) . 
% comment \n"; - ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact1), statements); + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); } @Test public void testPositiveLiteral() throws ParsingException { - String input = "(?X,)"; - Literal literal = RuleParser.parsePositiveLiteral(input); - assertEquals(atom1, literal); + final String input = "(?X,)"; + final Literal literal = RuleParser.parsePositiveLiteral(input); + assertEquals(this.atom1, literal); } @Test(expected = ParsingException.class) public void testPositiveLiteralError() throws ParsingException { - String input = "~ (?X,)"; + final String input = "~ (?X,)"; RuleParser.parsePositiveLiteral(input); } @Test public void testLiteral() throws ParsingException { - String input = "~ (?X,)"; - Literal literal = RuleParser.parseLiteral(input); - assertEquals(negAtom1, literal); + final String input = "~ (?X,)"; + final Literal literal = RuleParser.parseLiteral(input); + assertEquals(this.negAtom1, literal); } @Test(expected = ParsingException.class) public void tesLiteralError() throws ParsingException { - String input = "(?X, facts = result.getFacts(); + final String input = "(_:blank) ."; + final KnowledgeBase result = RuleParser.parse(input); + final List facts = result.getFacts(); assertEquals(1, facts.size()); - assertArgumentIsNamedNull(facts.get(0), 1); + this.assertArgumentIsNamedNull(facts.get(0), 1); } @Test public void parseTerm_NamedNull_succeeds() throws ParsingException { - String input = "_:blank"; - Term result = RuleParser.parseTerm(input); - assertUuid(result.getName()); + final String input = "_:blank"; + final Term result = RuleParser.parseTerm(input); + this.assertUuid(result.getName()); } @Test public void parseTerm_NamedNullInHead_succeeds() throws ParsingException { - String input = "_:blank"; - Term result = RuleParser.parseTerm(input, 
FormulaContext.HEAD); - assertUuid(result.getName()); + final String input = "_:blank"; + final Term result = RuleParser.parseTerm(input, FormulaContext.HEAD); + this.assertUuid(result.getName()); } @Test(expected = ParsingException.class) public void parseTerm_NamedNullInBodyContext_throws() throws ParsingException { - String input = "_:blank"; + final String input = "_:blank"; RuleParser.parseTerm(input, FormulaContext.BODY); } @Test(expected = ParsingException.class) public void testBParsingExceptione() throws ParsingException { - String input = "_:(a) ."; + final String input = "_:(a) ."; RuleParser.parse(input); } @@ -440,24 +441,24 @@ public void testNonIriTypeInDatatypeLiteral() throws ParsingException { public void testIriTypeInDatatypeLiteral() throws ParsingException { final String iri = "whatever"; final String input = "P(\"a\"^^<" + iri + ">)"; - Literal literal = RuleParser.parseLiteral(input); - DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + final Literal literal = RuleParser.parseLiteral(input); + final DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(iri, result.getDatatype()); } @Test public void predicateRelativeNumericIRITest() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("<1.e1>(a)."); // 1.e1 == "10"^^xsd:double - Fact f2 = Expressions.makeFact("1.e1", a); + final AbstractConstantImpl a = new AbstractConstantImpl("a"); + final Fact f = RuleParser.parseFact("<1.e1>(a)."); // 1.e1 == "10"^^xsd:double + final Fact f2 = Expressions.makeFact("1.e1", a); assertEquals(f, f2); } @Test public void predicateAbsoluteIRITest() throws ParsingException { - AbstractConstantImpl a = new AbstractConstantImpl("a"); - Fact f = RuleParser.parseFact("(a)."); - Fact f2 = Expressions.makeFact("a:b", a); + final AbstractConstantImpl a = new AbstractConstantImpl("a"); + final Fact f = RuleParser.parseFact("(a)."); + 
final Fact f2 = Expressions.makeFact("a:b", a); assertEquals(f, f2); } @@ -479,124 +480,124 @@ public void parse_absoluteIrisInRule_succeeds() throws ParsingException { @Test public void testCustomDatatype() throws ParsingException { final String typename = "http://example.org/#test"; - DatatypeConstant constant = Expressions.makeDatatypeConstant("test", typename); - DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class); - ParserConfiguration parserConfiguration = new ParserConfiguration(); + final DatatypeConstant constant = Expressions.makeDatatypeConstant("test", typename); + final DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class); + final ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDatatype(typename, handler); doReturn(constant).when(handler).createConstant(ArgumentMatchers.eq("hello, world")); - String input = "p(\"hello, world\"^^<" + typename + ">)"; - Literal literal = RuleParser.parseLiteral(input, parserConfiguration); - DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + final String input = "p(\"hello, world\"^^<" + typename + ">)"; + final Literal literal = RuleParser.parseLiteral(input, parserConfiguration); + final DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(constant, result); } @Test public void parse_importStatement_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/facts.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); - List result = knowledgeBase.getFacts(); + final String input = "@import \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void 
parse_relativeImportStatement_succeeds() throws ParsingException { - String input = "@base . @import-relative \"src/test/resources/facts.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact3); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact3); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_importStatement_relativeImport_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/subdir/sibling.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact4, fact5); - List result = knowledgeBase.getFacts(); + final String input = "@import \"src/test/resources/subdir/sibling.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact4, this.fact5); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_importStatement_relativeParentImport_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/subdir/parent.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); - List result = knowledgeBase.getFacts(); + final String input = "@import \"src/test/resources/subdir/parent.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_relativeImportStatement_relativeImport_succeeds() throws ParsingException { - String input = "@base . 
@import-relative \"src/test/resources/subdir/sibling.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact4, fact5); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/subdir/sibling.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact4, this.fact5); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_relativeImportStatement_relativeParentImport_succeeds() throws ParsingException { - String input = "@base . @import-relative \"src/test/resources/subdir/parent.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/subdir/parent.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } @Test public void parse_import_renamesNamedNulls() throws ParsingException { - String input = "p(_:blank) . @import \"src/test/resources/blank.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List facts = knowledgeBase.getFacts(); + final String input = "p(_:blank) . 
@import \"src/test/resources/blank.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List facts = knowledgeBase.getFacts(); assertEquals(2, facts.size()); - Fact fact1 = facts.get(0); - Fact fact2 = facts.get(1); + final Fact fact1 = facts.get(0); + final Fact fact2 = facts.get(1); assertNotEquals(fact1, fact2); - assertArgumentIsNamedNull(fact1, 1); - assertArgumentIsNamedNull(fact2, 1); + this.assertArgumentIsNamedNull(fact1, 1); + this.assertArgumentIsNamedNull(fact2, 1); } @Test public void parse_reusedNamedNulls_identical() throws ParsingException { - String input = "p(_:blank) . q(_:blank) . p(_:other) ."; + final String input = "p(_:blank) . q(_:blank) . p(_:other) ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List facts = knowledgeBase.getFacts(); + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List facts = knowledgeBase.getFacts(); assertEquals(3, facts.size()); - Fact fact1 = facts.get(0); - Fact fact2 = facts.get(1); - Fact fact3 = facts.get(2); + final Fact fact1 = facts.get(0); + final Fact fact2 = facts.get(1); + final Fact fact3 = facts.get(2); assertEquals(fact1.getArguments().get(0), fact2.getArguments().get(0)); assertNotEquals(fact1.getArguments().get(0), fact3.getArguments().get(0)); - assertArgumentIsNamedNull(fact1, 1); - assertArgumentIsNamedNull(fact2, 1); - assertArgumentIsNamedNull(fact3, 1); + this.assertArgumentIsNamedNull(fact1, 1); + this.assertArgumentIsNamedNull(fact2, 1); + this.assertArgumentIsNamedNull(fact3, 1); } @Test public void parseInto_duplicateImportStatements_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/facts.rls\" . "; - KnowledgeBase knowledgeBase = RuleParser.parse(input); + final String input = "@import \"src/test/resources/facts.rls\" . 
"; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } @Test public void parseInto_duplicateRelativeImportStatements_succeeds() throws ParsingException { - String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); + final String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } @Test public void parseInto_relativeImportRedeclaringBase_succeeds() throws ParsingException { - String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; - KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact3); - List result = knowledgeBase.getFacts(); + final String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact3); + final List result = knowledgeBase.getFacts(); assertEquals(expected, result); } } From 44766c5f304e6a8d28ff506d3ceb20fff5a81b90 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:37:04 +0200 Subject: [PATCH 1136/1255] update dependency version to VLog --- rulewerk-vlog/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index 798211aea..a4a504c49 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -17,7 +17,7 @@ Bindings for the VLog reasoner backend. 
- 1.3.4 + 1.3.5 vlog-java From 02d0a7b9f08d56aa86e8e18ad96772933a915fca Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:49:30 +0200 Subject: [PATCH 1137/1255] Update RELEASE-NOTES.md --- RELEASE-NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index faf83bafe..58850e818 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -8,6 +8,7 @@ Bugfixes: * Encoding of RDF strings corrected to make sure VLog succeeds joining on strings * Fixed handling of trident databases that are not a direct child of the current working directory * Fixed encoding of language-tagged strings that are used in Rulewerk facts, which had caused an exception +* Several reasoning errors in VLog (backend) have been discovered and fixed in the version used now Rulewerk v0.7.0 --------------- From 9971937f7fbfd97ad917cc2a1cc2aaedc95391e3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 13:57:42 +0200 Subject: [PATCH 1138/1255] Update README.md Describe rulewerk-integrationtests test module --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 5f6b233a4..fa2c78a23 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,8 @@ You need to use Java 1.8 or above. Available source modules include: * **rulewerk-commands**: support for running commands, as done by the client * **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk. +Test module **rulewerk-integrationtests** contains integration tests that verify the correctness of the backend reasoners for various complex reasoning problems. 
+ The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: * (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps. * Delete (if existing) previous local builds (`local_builds` directory). From 588125648017aeca882ed1f2dc5f9ad76c354cba Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Sun, 23 May 2021 14:01:19 +0200 Subject: [PATCH 1139/1255] update to release version 0.8.0 --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-integrationtests/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index be85d779e..8e80c639c 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 coverage diff --git a/pom.xml b/pom.xml index 7ae152180..c235cff02 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index 695e9a6a4..a613d4e9a 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-client diff 
--git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 773e18561..461e18428 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 3ab864a66..30fa5685a 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index 341378aec..e5caee14f 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index 13631b3cb..b86c3e8c5 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-graal diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index 91bd919e3..ca7bcee5f 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-integrationtests diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index be81cdf49..587981a15 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index e495e5247..e73738aec 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index e64742507..178cba9ae 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ 
org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index a4a504c49..ab257bf3c 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0-SNAPSHOT + 0.8.0 rulewerk-vlog From 9dd07d0484673d3210fa7627c7fb9dc126e1d397 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 May 2021 16:26:15 +0200 Subject: [PATCH 1140/1255] update to snapshot version --- coverage/pom.xml | 2 +- pom.xml | 2 +- rulewerk-client/pom.xml | 2 +- rulewerk-commands/pom.xml | 2 +- rulewerk-core/pom.xml | 2 +- rulewerk-examples/pom.xml | 2 +- rulewerk-graal/pom.xml | 2 +- rulewerk-integrationtests/pom.xml | 2 +- rulewerk-owlapi/pom.xml | 2 +- rulewerk-parser/pom.xml | 2 +- rulewerk-rdf/pom.xml | 2 +- rulewerk-vlog/pom.xml | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/coverage/pom.xml b/coverage/pom.xml index 8e80c639c..5d59b5e78 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT coverage diff --git a/pom.xml b/pom.xml index c235cff02..bb8f5d302 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT pom Rulewerk diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml index a613d4e9a..079f43e32 100644 --- a/rulewerk-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-client diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml index 461e18428..436c9f7a8 100644 --- a/rulewerk-commands/pom.xml +++ b/rulewerk-commands/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-commands diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 30fa5685a..bb19807a6 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ 
-7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-core diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml index e5caee14f..5daeac2c1 100644 --- a/rulewerk-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-examples diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml index b86c3e8c5..41467c8d5 100644 --- a/rulewerk-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-graal diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml index ca7bcee5f..6202b8896 100644 --- a/rulewerk-integrationtests/pom.xml +++ b/rulewerk-integrationtests/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-integrationtests diff --git a/rulewerk-owlapi/pom.xml b/rulewerk-owlapi/pom.xml index 587981a15..f4c4cbff9 100644 --- a/rulewerk-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -6,7 +6,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-owlapi diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml index e73738aec..982e14f02 100644 --- a/rulewerk-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-parser diff --git a/rulewerk-rdf/pom.xml b/rulewerk-rdf/pom.xml index 178cba9ae..9c93045bc 100644 --- a/rulewerk-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -8,7 +8,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-rdf diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml index ab257bf3c..4c0837a50 100644 --- a/rulewerk-vlog/pom.xml +++ b/rulewerk-vlog/pom.xml @@ -7,7 +7,7 @@ org.semanticweb.rulewerk rulewerk-parent - 0.8.0 + 0.9.0-SNAPSHOT rulewerk-vlog From ef0dd26bb348bb9cdd13c375c9349213def9866f Mon Sep 17 00:00:00 2001 From: Irina Dragoste 
Date: Tue, 25 May 2021 16:27:45 +0200 Subject: [PATCH 1141/1255] Update README.md update latest release version --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index fa2c78a23..884fa89ed 100644 --- a/README.md +++ b/README.md @@ -10,13 +10,13 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.7.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.8.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` org.semanticweb.rulewerk rulewerk-core - 0.7.0 + 0.8.0 ``` From 9d739549af1c32dd5fe8361866d6aba4a48f06f1 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 25 May 2021 16:55:49 +0200 Subject: [PATCH 1142/1255] fix javadoc errors --- .../rulewerk/client/picocli/Main.java | 8 +- .../rulewerk/core/model/api/Term.java | 3 +- .../core/model/implementation/Serializer.java | 219 +++++++++--------- .../rulewerk/core/reasoner/Reasoner.java | 20 +- .../DataSourceConfigurationVisitor.java | 6 +- .../implementation/Skolemization.java | 36 +-- .../rulewerk/rdf/RdfModelConverter.java | 44 ++-- 7 files changed, 169 insertions(+), 167 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java index f8b59bb0a..0193bc7da 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -52,12 +52,12 @@ public class Main { * Launches the client application for Rulewerk. 
The functionality depends on * the given command-line args ({@code args}): *
      - *
    • empty args ("") or argument "shell"
    • launch an - * interactive shell. + *
    • empty args ("") or argument "shell" launch an interactive + * shell.
    • *
    • argument "materialize" can be used with different options to complete * several materialization and querying tasks from the command line.
    • - *
    *
  • help
  • + * * * @param args * @@ -92,7 +92,7 @@ static void displayHelp(final String[] args, final PrintStream printStream) { /** * Configures {@link Logger} settings. Messages are logged to the console. Log - * level is set to {@link Level.FATAL}. + * level is set to {@link Level#FATAL}. */ public static void configureLogging() { // Create the appender that will write log messages to the console. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index 3bbabcfe4..bb0be440d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -65,8 +65,9 @@ default boolean isVariable() { /** * Accept a {@link TermVisitor} and return its output. - * + * * @param termVisitor the TermVisitor + * @param type associated to the given TermVisitor * @return output of the visitor */ T accept(TermVisitor termVisitor); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 741aba0a5..258d0f7f9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -40,6 +40,7 @@ import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.Literal; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.rulewerk.core.model.api.Rule; @@ -70,7 +71,7 @@ public class Serializer { */ public static 
final Function identityIriSerializer = new Function() { @Override - public String apply(String iri) { + public String apply(final String iri) { if (iri.contains(":") || !iri.matches(AbstractPrefixDeclarationRegistry.REGEXP_LOCNAME)) { return "<" + iri + ">"; } else { @@ -103,13 +104,13 @@ private class RuntimeIoException extends RuntimeException { private static final long serialVersionUID = 1L; final IOException cause; - public RuntimeIoException(IOException cause) { + public RuntimeIoException(final IOException cause) { super(cause); this.cause = cause; } public IOException getIoException() { - return cause; + return this.cause; } } @@ -122,60 +123,60 @@ public IOException getIoException() { private class SerializerTermVisitor implements TermVisitor { @Override - public Void visit(AbstractConstant term) { + public Void visit(final AbstractConstant term) { try { Serializer.this.writeAbstractConstant(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(DatatypeConstant term) { + public Void visit(final DatatypeConstant term) { try { Serializer.this.writeDatatypeConstant(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(LanguageStringConstant term) { + public Void visit(final LanguageStringConstant term) { try { Serializer.this.writeLanguageStringConstant(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(UniversalVariable term) { + public Void visit(final UniversalVariable term) { try { Serializer.this.writeUniversalVariable(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(ExistentialVariable term) { + public Void visit(final ExistentialVariable term) { try { 
Serializer.this.writeExistentialVariable(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(NamedNull term) { + public Void visit(final NamedNull term) { try { Serializer.this.writeNamedNull(term); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; @@ -192,30 +193,30 @@ public Void visit(NamedNull term) { private class SerializerStatementVisitor implements StatementVisitor { @Override - public Void visit(Fact statement) { + public Void visit(final Fact statement) { try { Serializer.this.writeFact(statement); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(Rule statement) { + public Void visit(final Rule statement) { try { Serializer.this.writeRule(statement); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; } @Override - public Void visit(DataSourceDeclaration statement) { + public Void visit(final DataSourceDeclaration statement) { try { Serializer.this.writeDataSourceDeclaration(statement); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeIoException(e); } return null; @@ -252,7 +253,7 @@ public Serializer(final Writer writer) { * @param writer the object used to write serializations * @param prefixDeclarationRegistry the object used to abbreviate IRIs */ - public Serializer(final Writer writer, PrefixDeclarationRegistry prefixDeclarationRegistry) { + public Serializer(final Writer writer, final PrefixDeclarationRegistry prefixDeclarationRegistry) { this(writer, (string) -> { return prefixDeclarationRegistry.unresolveAbsoluteIri(string, true); }); @@ -261,13 +262,13 @@ public Serializer(final Writer writer, PrefixDeclarationRegistry prefixDeclarati /** * Writes a serialization of the given {@link Statement}. 
* - * @param term a {@link Statement} + * @param statement a {@link Statement} to serialize * @throws IOException */ - public void writeStatement(Statement statement) throws IOException { + public void writeStatement(final Statement statement) throws IOException { try { statement.accept(this.serializerStatementVisitor); - } catch (Serializer.RuntimeIoException e) { + } catch (final Serializer.RuntimeIoException e) { throw e.getIoException(); } } @@ -278,9 +279,9 @@ public void writeStatement(Statement statement) throws IOException { * @param fact a {@link Fact} * @throws IOException */ - public void writeFact(Fact fact) throws IOException { - writeLiteral(fact); - writer.write(STATEMENT_END); + public void writeFact(final Fact fact) throws IOException { + this.writeLiteral(fact); + this.writer.write(STATEMENT_END); } /** @@ -289,9 +290,9 @@ public void writeFact(Fact fact) throws IOException { * @param rule a {@link Rule} * @throws IOException */ - public void writeRule(Rule rule) throws IOException { - writeRuleNoStatment(rule); - writer.write(STATEMENT_END); + public void writeRule(final Rule rule) throws IOException { + this.writeRuleNoStatment(rule); + this.writer.write(STATEMENT_END); } /** @@ -300,10 +301,10 @@ public void writeRule(Rule rule) throws IOException { * @param rule a {@link Rule} * @throws IOException */ - private void writeRuleNoStatment(Rule rule) throws IOException { - writeLiteralConjunction(rule.getHead()); - writer.write(" :- "); - writeLiteralConjunction(rule.getBody()); + private void writeRuleNoStatment(final Rule rule) throws IOException { + this.writeLiteralConjunction(rule.getHead()); + this.writer.write(" :- "); + this.writeLiteralConjunction(rule.getBody()); } /** @@ -312,12 +313,12 @@ private void writeRuleNoStatment(Rule rule) throws IOException { * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @throws IOException */ - public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclaration) throws 
IOException { - writer.write("@source "); - writePredicate(dataSourceDeclaration.getPredicate()); - writer.write(": "); - writeLiteral(dataSourceDeclaration.getDataSource().getDeclarationFact()); - writer.write(STATEMENT_END); + public void writeDataSourceDeclaration(final DataSourceDeclaration dataSourceDeclaration) throws IOException { + this.writer.write("@source "); + this.writePredicate(dataSourceDeclaration.getPredicate()); + this.writer.write(": "); + this.writeLiteral(dataSourceDeclaration.getDataSource().getDeclarationFact()); + this.writer.write(STATEMENT_END); } /** @@ -326,11 +327,11 @@ public void writeDataSourceDeclaration(DataSourceDeclaration dataSourceDeclarati * @param literal a {@link Literal} * @throws IOException */ - public void writeLiteral(Literal literal) throws IOException { + public void writeLiteral(final Literal literal) throws IOException { if (literal.isNegated()) { - writer.write("~"); + this.writer.write("~"); } - writePositiveLiteral(literal.getPredicate(), literal.getArguments()); + this.writePositiveLiteral(literal.getPredicate(), literal.getArguments()); } /** @@ -341,21 +342,21 @@ public void writeLiteral(Literal literal) throws IOException { * @param arguments a list of {@link Term} arguments * @throws IOException */ - public void writePositiveLiteral(Predicate predicate, List arguments) throws IOException { - writer.write(getIri(predicate.getName())); - writer.write("("); + public void writePositiveLiteral(final Predicate predicate, final List arguments) throws IOException { + this.writer.write(this.getIri(predicate.getName())); + this.writer.write("("); boolean first = true; for (final Term term : arguments) { if (first) { first = false; } else { - writer.write(", "); + this.writer.write(", "); } - writeTerm(term); + this.writeTerm(term); } - writer.write(")"); + this.writer.write(")"); } /** @@ -371,9 +372,9 @@ public void writeLiteralConjunction(final Conjunction literal if (first) { first = false; } else { - 
writer.write(", "); + this.writer.write(", "); } - writeLiteral(literal); + this.writeLiteral(literal); } } @@ -384,11 +385,11 @@ public void writeLiteralConjunction(final Conjunction literal * @param predicate a {@link Predicate} * @throws IOException */ - public void writePredicate(Predicate predicate) throws IOException { - writer.write(getIri(predicate.getName())); - writer.write("["); - writer.write(String.valueOf(predicate.getArity())); - writer.write("]"); + public void writePredicate(final Predicate predicate) throws IOException { + this.writer.write(this.getIri(predicate.getName())); + this.writer.write("["); + this.writer.write(String.valueOf(predicate.getArity())); + this.writer.write("]"); } /** @@ -397,10 +398,10 @@ public void writePredicate(Predicate predicate) throws IOException { * @param term a {@link Term} * @throws IOException */ - public void writeTerm(Term term) throws IOException { + public void writeTerm(final Term term) throws IOException { try { term.accept(this.serializerTermVisitor); - } catch (Serializer.RuntimeIoException e) { + } catch (final Serializer.RuntimeIoException e) { throw e.getIoException(); } } @@ -411,8 +412,8 @@ public void writeTerm(Term term) throws IOException { * @param abstractConstant a {@link AbstractConstant} * @throws IOException */ - public void writeAbstractConstant(AbstractConstant abstractConstant) throws IOException { - writer.write(getIri(abstractConstant.getName())); + public void writeAbstractConstant(final AbstractConstant abstractConstant) throws IOException { + this.writer.write(this.getIri(abstractConstant.getName())); } /** @@ -421,13 +422,13 @@ public void writeAbstractConstant(AbstractConstant abstractConstant) throws IOEx * @param datatypeConstant a {@link DatatypeConstant} * @throws IOException */ - public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOException { + public void writeDatatypeConstant(final DatatypeConstant datatypeConstant) throws IOException { if 
(PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) { - writer.write(getQuotedString(datatypeConstant.getLexicalValue())); + this.writer.write(this.getQuotedString(datatypeConstant.getLexicalValue())); } else if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype())) { - writer.write(datatypeConstant.getLexicalValue()); + this.writer.write(datatypeConstant.getLexicalValue()); } else { - writeDatatypeConstantNoAbbreviations(datatypeConstant); + this.writeDatatypeConstantNoAbbreviations(datatypeConstant); } } @@ -438,10 +439,10 @@ public void writeDatatypeConstant(DatatypeConstant datatypeConstant) throws IOEx * @param datatypeConstant a {@link DatatypeConstant} * @throws IOException */ - public void writeDatatypeConstantNoAbbreviations(DatatypeConstant datatypeConstant) throws IOException { - writer.write(getQuotedString(datatypeConstant.getLexicalValue())); - writer.write("^^"); - writer.write(getIri(datatypeConstant.getDatatype())); + public void writeDatatypeConstantNoAbbreviations(final DatatypeConstant datatypeConstant) throws IOException { + this.writer.write(this.getQuotedString(datatypeConstant.getLexicalValue())); + this.writer.write("^^"); + this.writer.write(this.getIri(datatypeConstant.getDatatype())); } /** @@ -450,9 +451,9 @@ public void writeDatatypeConstantNoAbbreviations(DatatypeConstant datatypeConsta * @param universalVariable a {@link UniversalVariable} * @throws IOException */ - public void writeUniversalVariable(UniversalVariable universalVariable) throws IOException { - writer.write("?"); - writer.write(universalVariable.getName()); + public void writeUniversalVariable(final UniversalVariable universalVariable) throws IOException { + this.writer.write("?"); + this.writer.write(universalVariable.getName()); } /** @@ -461,9 +462,9 @@ public void writeUniversalVariable(UniversalVariable universalVariable) throws I * @param existentialVariable a {@link ExistentialVariable} * @throws IOException */ - 
public void writeExistentialVariable(ExistentialVariable existentialVariable) throws IOException { - writer.write("!"); - writer.write(existentialVariable.getName()); + public void writeExistentialVariable(final ExistentialVariable existentialVariable) throws IOException { + this.writer.write("!"); + this.writer.write(existentialVariable.getName()); } /** @@ -472,9 +473,9 @@ public void writeExistentialVariable(ExistentialVariable existentialVariable) th * @param namedNull a {@link NamedNull} * @throws IOException */ - public void writeNamedNull(NamedNull namedNull) throws IOException { - writer.write("_:"); - writer.write(namedNull.getName()); + public void writeNamedNull(final NamedNull namedNull) throws IOException { + this.writer.write("_:"); + this.writer.write(namedNull.getName()); } /** @@ -485,29 +486,29 @@ public void writeNamedNull(NamedNull namedNull) throws IOException { * @throws IOException * @return true if anything has been written */ - public boolean writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) + public boolean writePrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDeclarationRegistry) throws IOException { boolean result = false; final String baseIri = prefixDeclarationRegistry.getBaseIri(); if (!PrefixDeclarationRegistry.EMPTY_BASE.contentEquals(baseIri)) { - writer.write("@base <"); - writer.write(baseIri); - writer.write(">"); - writer.write(STATEMENT_END); - writer.write("\n"); + this.writer.write("@base <"); + this.writer.write(baseIri); + this.writer.write(">"); + this.writer.write(STATEMENT_END); + this.writer.write("\n"); result = true; } - Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); + final Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); while (prefixIterator.hasNext()) { - Entry entry = prefixIterator.next(); - writer.write("@prefix "); - writer.write(entry.getKey()); - writer.write(" <"); - writer.write(entry.getValue()); - writer.write(">"); - 
writer.write(STATEMENT_END); - writer.write("\n"); + final Entry entry = prefixIterator.next(); + this.writer.write("@prefix "); + this.writer.write(entry.getKey()); + this.writer.write(" <"); + this.writer.write(entry.getValue()); + this.writer.write(">"); + this.writer.write(STATEMENT_END); + this.writer.write("\n"); result = true; } return result; @@ -519,10 +520,10 @@ public boolean writePrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDe * @param languageStringConstant a {@link LanguageStringConstant} * @throws IOException */ - public void writeLanguageStringConstant(LanguageStringConstant languageStringConstant) throws IOException { - writer.write(getQuotedString(languageStringConstant.getString())); - writer.write("@"); - writer.write(languageStringConstant.getLanguageTag()); + public void writeLanguageStringConstant(final LanguageStringConstant languageStringConstant) throws IOException { + this.writer.write(this.getQuotedString(languageStringConstant.getString())); + this.writer.write("@"); + this.writer.write(languageStringConstant.getLanguageTag()); } /** @@ -531,21 +532,21 @@ public void writeLanguageStringConstant(LanguageStringConstant languageStringCon * @param command a {@link Command} * @throws IOException */ - public void writeCommand(Command command) throws IOException { - writer.write("@"); - writer.write(command.getName()); + public void writeCommand(final Command command) throws IOException { + this.writer.write("@"); + this.writer.write(command.getName()); - for (Argument argument : command.getArguments()) { - writer.write(" "); + for (final Argument argument : command.getArguments()) { + this.writer.write(" "); if (argument.fromRule().isPresent()) { - writeRuleNoStatment(argument.fromRule().get()); + this.writeRuleNoStatment(argument.fromRule().get()); } else if (argument.fromPositiveLiteral().isPresent()) { - writeLiteral(argument.fromPositiveLiteral().get()); + this.writeLiteral(argument.fromPositiveLiteral().get()); } else { - 
writeTerm(argument.fromTerm().get()); + this.writeTerm(argument.fromTerm().get()); } } - writer.write(STATEMENT_END); + this.writer.write(STATEMENT_END); } /** @@ -555,12 +556,12 @@ public void writeCommand(Command command) throws IOException { * a string * @return serialization string */ - public static String getSerialization(SerializationWriter writeAction) { + public static String getSerialization(final SerializationWriter writeAction) { final StringWriter stringWriter = new StringWriter(); final Serializer serializer = new Serializer(stringWriter); try { writeAction.write(serializer); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeException("StringWriter should never throw an IOException."); } return stringWriter.toString(); @@ -588,6 +589,6 @@ private String getQuotedString(final String string) { } private String getIri(final String string) { - return iriTransformer.apply(string); + return this.iriTransformer.apply(string); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index d94ea7128..156a03d23 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -117,10 +117,10 @@ public interface InferenceAction { * @return the correctness of the inferences, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. 
*/ - default Correctness unsafeForEachInference(BiConsumer> action) { + default Correctness unsafeForEachInference(final BiConsumer> action) { try { - return forEachInference(action::accept); - } catch (IOException e) { + return this.forEachInference(action::accept); + } catch (final IOException e) { throw new RulewerkRuntimeException(e); } } @@ -129,18 +129,18 @@ default Correctness unsafeForEachInference(BiConsumer> act * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. * - * @param stream an OutputStream for the facts to be written to. + * @param writer the {@link Writer} used to write inferences. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException */ - default Correctness writeInferences(Writer writer) throws IOException { - final PrefixDeclarationRegistry prefixDeclarationRegistry = getKnowledgeBase().getPrefixDeclarationRegistry(); + default Correctness writeInferences(final Writer writer) throws IOException { + final PrefixDeclarationRegistry prefixDeclarationRegistry = this.getKnowledgeBase().getPrefixDeclarationRegistry(); final Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); - return forEachInference((predicate, termList) -> { + return this.forEachInference((predicate, termList) -> { serializer.writePositiveLiteral(predicate, termList); writer.write(" .\n"); }); @@ -153,8 +153,8 @@ default Correctness writeInferences(Writer writer) throws IOException { * inferences. 
*/ default Stream getInferences() { - Stream.Builder builder = Stream.builder(); - unsafeForEachInference((predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); + final Stream.Builder builder = Stream.builder(); + this.unsafeForEachInference((predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); return builder.build(); } @@ -180,7 +180,7 @@ default Stream getInferences() { * method will disappear. */ @Deprecated - default Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + default Correctness writeInferences(final String filePath) throws FileNotFoundException, IOException { try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) { return this.writeInferences(writer); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java index 91c78b4e1..cb227662c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -33,7 +33,7 @@ public interface DataSourceConfigurationVisitor { * Configure the reasoner for a {@link CsvFileDataSource}. * * @param dataSource the data source to configure - * @throws IOexception when an IO error occurs during configuration + * @throws IOException when an IO error occurs during configuration */ public void visit(CsvFileDataSource dataSource) throws IOException; @@ -41,7 +41,7 @@ public interface DataSourceConfigurationVisitor { * Configure the reasoner for a {@link RdfFileDataSource}. 
* * @param dataSource the data source to configure - * @throws IOexception when an IO error occurs during configuration + * @throws IOException when an IO error occurs during configuration */ public void visit(RdfFileDataSource dataSource) throws IOException; @@ -49,7 +49,7 @@ public interface DataSourceConfigurationVisitor { * Configure the reasoner for a {@link TridentDataSource}. * * @param dataSource the data source to configure - * @throws IOexception when an IO error occurs during configuration + * @throws IOException when an IO error occurs during configuration */ public void visit(TridentDataSource dataSource) throws IOException; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 54080781b..4f03dde77 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -66,8 +66,8 @@ public class Skolemization { * @return a {@link RenamedNamedNull} with a new name that is specific to this * instance and {@code name}. */ - public RenamedNamedNull getRenamedNamedNull(String name) { - return new RenamedNamedNull(getFreshName(name)); + public RenamedNamedNull getRenamedNamedNull(final String name) { + return new RenamedNamedNull(this.getFreshName(name)); } /** @@ -80,8 +80,8 @@ public RenamedNamedNull getRenamedNamedNull(String name) { * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code name}. 
*/ - public AbstractConstant getSkolemConstant(String name, TermFactory termFactory) { - return termFactory.makeAbstractConstant(getSkolemConstantName(name)); + public AbstractConstant getSkolemConstant(final String name, final TermFactory termFactory) { + return termFactory.makeAbstractConstant(this.getSkolemConstantName(name)); } /** @@ -95,8 +95,8 @@ public AbstractConstant getSkolemConstant(String name, TermFactory termFactory) * @return a {@link AbstractConstant} with an IRI that is specific to this * instance and {@code namedNull}. */ - public AbstractConstant getSkolemConstant(NamedNull namedNull, TermFactory termFactory) { - return termFactory.makeAbstractConstant(getSkolemConstantName(namedNull)); + public AbstractConstant getSkolemConstant(final NamedNull namedNull, final TermFactory termFactory) { + return termFactory.makeAbstractConstant(this.getSkolemConstantName(namedNull)); } @@ -108,8 +108,8 @@ public AbstractConstant getSkolemConstant(NamedNull namedNull, TermFactory termF * other string for which to create a unique renaming) * @return string that is an IRI for a skolem constant */ - public String getSkolemConstantName(String name) { - return getSkolemConstantNameFromUniqueName(getFreshName(name).toString()); + public String getSkolemConstantName(final String name) { + return this.getSkolemConstantNameFromUniqueName(this.getFreshName(name).toString()); } /** @@ -117,15 +117,15 @@ public String getSkolemConstantName(String name) { * named {@link NamedNull}. The method ensures that a new unique name is * generated unless the given object is already a {@link RenamedNamedNull}. 
* - * @param name the name of the {@link NamedNull} to be renamed here (or any - * other string for which to create a unique renaming) + * @param namedNull the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) * @return string that is an IRI for a skolem constant */ - public String getSkolemConstantName(NamedNull namedNull) { + public String getSkolemConstantName(final NamedNull namedNull) { if (namedNull instanceof RenamedNamedNull) { - return getSkolemConstantNameFromUniqueName(namedNull.getName()); + return this.getSkolemConstantNameFromUniqueName(namedNull.getName()); } else { - return getSkolemConstantName(namedNull.getName()); + return this.getSkolemConstantName(namedNull.getName()); } } @@ -135,7 +135,7 @@ public String getSkolemConstantName(NamedNull namedNull) { * @param name local id of skolem constant * @return IRI string */ - private String getSkolemConstantNameFromUniqueName(String name) { + private String getSkolemConstantNameFromUniqueName(final String name) { return SKOLEM_IRI_PREFIX + SKOLEM_UUID_START + name; } @@ -147,10 +147,10 @@ private String getSkolemConstantNameFromUniqueName(String name) { * @param name the string to be renamed * @return a UUID for the new name */ - public UUID getFreshName(String name) { - byte[] nameBytes = name.getBytes(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - stream.write(namedNullNamespace, 0, namedNullNamespace.length); + public UUID getFreshName(final String name) { + final byte[] nameBytes = name.getBytes(); + final ByteArrayOutputStream stream = new ByteArrayOutputStream(); + stream.write(this.namedNullNamespace, 0, this.namedNullNamespace.length); stream.write(nameBytes, 0, nameBytes.length); return UUID.nameUUIDFromBytes(stream.toByteArray()); } diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index 
768cc945a..7bc936719 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -31,10 +31,10 @@ import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; -import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; @@ -50,8 +50,8 @@ * given {@code rdfModel} into an {@link PositiveLiteral} of the form * {@code TRIPLE(subject, predicate, object)}. The ternary predicate used for * all literals generated from RDF triples is - * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE}. Subject, predicate and object - * {@link Value}s are converted to corresponding {@link Term}s: + * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE_NAME}. Subject, predicate and + * object {@link Value}s are converted to corresponding {@link Term}s: *
      + *
    • {@code \}
    • + *
    • {@code "}
    • + *
    • {@code \t}
    • + *
    • {@code \b}
    • + *
    • {@code \n}
    • + *
    • {@code \r}
    • + *
    • {@code \f}
    • + *
        + * Example for {@code string = "\\a"}, the returned value is + * {@code string = "\"\\\\a\""} + * + * @param string + * @return an escaped string surrounded by {@code "}. + */ + public static String getString(final String string) { + return addQuotes(escape(string)); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
          + *
        • {@code \}
        • + *
        • {@code "}
        • + *
        • {@code \t}
        • + *
        • {@code \b}
        • + *
        • {@code \n}
        • + *
        • {@code \r}
        • + *
        • {@code \f}
        • + *
            * * @param string * @return an escaped string */ - public static String escape(final String string) { + private static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings @@ -351,6 +384,7 @@ private static String addQuotes(final String string) { return QUOTE + string + QUOTE; } + private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java index a2124804a..c02772192 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java @@ -91,8 +91,8 @@ public void toString_CsvFileDataSource() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); - assertEquals("@source q[1]: load-csv(\"" + expectedFilePath + "\") .", + final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } @@ -118,7 +118,7 @@ public void toString_RdfFileDataSource_relativePath() throws IOException { final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = Serializer.escape(relativeDirName + File.separator + fileName); - assertEquals("@source q[1]: load-rdf(\"" + 
expectedFilePath + "\") .", dataSourceDeclaration.toString()); + final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } From 0b9ecba4e65515312d55cc2bf516e78ac259e798 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 7 Jan 2020 13:51:36 +0100 Subject: [PATCH 0713/1255] Disable parallel test execution Parallel test execution seems to cause corrupted coverage files when building on Windows, so disable it. --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index e7754337d..95ee3ad64 100644 --- a/pom.xml +++ b/pom.xml @@ -233,7 +233,7 @@ 3.0.0-M4 ${surefireArgLine} - 1C + 1 true From af2d6f4dd0992206d7617d1e654573c779d915d7 Mon Sep 17 00:00:00 2001 From: alloka Date: Tue, 7 Jan 2020 19:36:06 +0100 Subject: [PATCH 0714/1255] added methods to obtain facts --- .../vlog4j/core/reasoner/KnowledgeBase.java | 869 +++++++++--------- .../vlog4j/core/reasoner/Reasoner.java | 5 + .../reasoner/implementation/VLogReasoner.java | 50 + .../examples/SimpleReasoningExample.java | 1 - 4 files changed, 489 insertions(+), 436 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 54c4a256f..d63f9be70 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,435 +1,434 @@ -package org.semanticweb.vlog4j.core.reasoner; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import 
org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * A knowledge base with rules, facts, and declarations for loading data from - * further sources. This is a "syntactic" object in that it represents some - * information that is not relevant for the semantics of reasoning, but that is - * needed to ensure faithful re-serialisation of knowledge bases loaded from - * files (e.g., preserving order). - * - * @author Markus Kroetzsch - * - */ -public class KnowledgeBase implements Iterable { - - private final Set listeners = new HashSet<>(); - - /** - * Auxiliary class to process {@link Statement}s when added to the knowledge - * base. Returns true if a statement was added successfully. 
- * - * @author Markus Kroetzsch - * - */ - private class AddStatementVisitor implements StatementVisitor { - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.addFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.add(statement); - return true; - } - } - - private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); - - /** - * Auxiliary class to process {@link Statement}s when removed from the knowledge - * base. Returns true if a statement was removed successfully. - * - * @author Irina Dragoste - * - */ - private class RemoveStatementVisitor implements StatementVisitor { - - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.removeFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.remove(statement); - return true; - } - } - - private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); - - private class ExtractStatementsVisitor implements StatementVisitor { - - final ArrayList extracted = new ArrayList<>(); - final Class ownType; - - ExtractStatementsVisitor(final Class type) { - this.ownType = type; - } - - ArrayList getExtractedStatements() { - return this.extracted; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Fact statement) { - if (this.ownType.equals(Fact.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Rule statement) { - if (this.ownType.equals(Rule.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - 
@Override - public Void visit(final DataSourceDeclaration statement) { - if (this.ownType.equals(DataSourceDeclaration.class)) { - this.extracted.add((T) statement); - } - return null; - } - } - - /** - * The primary storage for the contents of the knowledge base. - */ - private final LinkedHashSet statements = new LinkedHashSet<>(); - -// TODO support prefixes -// /** -// * Known prefixes that can be used to pretty-print the contents of the knowledge -// * base. We try to preserve user-provided prefixes found in files when loading -// * data. -// */ -// PrefixDeclarations prefixDeclarations; - - /** - * Index structure that organises all facts by their predicate. - */ - private final Map> factsByPredicate = new HashMap<>(); - - /** - * Index structure that holds all data source declarations of this knowledge - * base. - */ - private final Set dataSourceDeclarations = new HashSet<>(); - - /** - * Registers a listener for changes on the knowledge base - * - * @param listener - */ - public void addListener(final KnowledgeBaseListener listener) { - this.listeners.add(listener); - } - - /** - * Unregisters given listener from changes on the knowledge base - * - * @param listener - */ - public void deleteListener(final KnowledgeBaseListener listener) { - this.listeners.remove(listener); - - } - - /** - * Adds a single statement to the knowledge base. - * - * @param statement the statement to be added - */ - public void addStatement(final Statement statement) { - if (this.doAddStatement(statement)) { - this.notifyListenersOnStatementAdded(statement); - } - } - - /** - * Adds a single statement to the knowledge base. - * - * @param statement the statement to be added - * @return true, if the knowledge base has changed. 
- */ - boolean doAddStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { - this.statements.add(statement); - return true; - } - return false; - } - - /** - * Adds a collection of statements to the knowledge base. - * - * @param statements the statements to be added - */ - public void addStatements(final Collection statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Adds a list of statements to the knowledge base. - * - * @param statements the statements to be added - */ - public void addStatements(final Statement... statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - */ - public void removeStatement(final Statement statement) { - if (this.doRemoveStatement(statement)) { - this.notifyListenersOnStatementRemoved(statement); - } - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - * @return true, if the knowledge base has changed. - */ - boolean doRemoveStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - - if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { - this.statements.remove(statement); - return true; - } - return false; - } - - /** - * Removes a collection of statements to the knowledge base. 
- * - * @param statements the statements to remove - */ - public void removeStatements(final Collection statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - /** - * Removes a list of statements from the knowledge base. - * - * @param statements the statements to remove - */ - public void removeStatements(final Statement... statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - private void notifyListenersOnStatementAdded(final Statement addedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementAdded(addedStatement); - } - } - - private void notifyListenersOnStatementsAdded(final List addedStatements) { - if (!addedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsAdded(addedStatements); - } - } - } - - private void notifyListenersOnStatementRemoved(final Statement removedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementRemoved(removedStatement); - } - } - - private void notifyListenersOnStatementsRemoved(final List removedStatements) { - if (!removedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsRemoved(removedStatements); - } - } - } - - /** - * Get the list of all rules that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete rules. 
- * - * @return list of {@link Rule}s - */ - public List getRules() { - return this.getStatementsByType(Rule.class); - } - - /** - * Get the list of all facts that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete facts. - * - * @return list of {@link Fact}s - */ - public List getFacts() { - return this.getStatementsByType(Fact.class); - } - - /** - * Get the list of all data source declarations that have been added to the - * knowledge base. The list is read-only and cannot be modified to add or delete - * facts. - * - * @return list of {@link DataSourceDeclaration}s - */ - public List getDataSourceDeclarations() { - return this.getStatementsByType(DataSourceDeclaration.class); - } - - List getStatementsByType(final Class type) { - final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); - for (final Statement statement : this.statements) { - statement.accept(visitor); - } - return Collections.unmodifiableList(visitor.getExtractedStatements()); - } - - /** - * Add a single fact to the internal data structures. It is assumed that it has - * already been checked that this fact is not present yet. - * - * @param fact the fact to add - */ - void addFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); - this.factsByPredicate.get(predicate).add(fact); - } - - /** - * Removes a single fact from the internal data structure. It is assumed that it - * has already been checked that this fact is already present. - * - * @param fact the fact to remove - */ - void removeFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - final Set facts = this.factsByPredicate.get(predicate); - facts.remove(fact); - if (facts.isEmpty()) { - this.factsByPredicate.remove(predicate); - } - } - - /** - * Returns all {@link Statement}s of this knowledge base. 
- * - * The result can be iterated over and will return statements in the original - * order. The collection is read-only and cannot be modified to add or delete - * statements. - * - * @return a collection of statements - */ - public Collection getStatements() { - return Collections.unmodifiableCollection(this.statements); - } - - @Override - public Iterator iterator() { - return Collections.unmodifiableCollection(this.statements).iterator(); - } - - Map> getFactsByPredicate() { - return this.factsByPredicate; - } - -} +package org.semanticweb.vlog4j.core.reasoner; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +/** + * A knowledge base with rules, facts, and declarations for loading data from + * further sources. This is a "syntactic" object in that it represents some + * information that is not relevant for the semantics of reasoning, but that is + * needed to ensure faithful re-serialisation of knowledge bases loaded from + * files (e.g., preserving order). + * + * @author Markus Kroetzsch + * + */ +public class KnowledgeBase implements Iterable { + + private final Set listeners = new HashSet<>(); + + /** + * Auxiliary class to process {@link Statement}s when added to the knowledge + * base. Returns true if a statement was added successfully. + * + * @author Markus Kroetzsch + * + */ + private class AddStatementVisitor implements StatementVisitor { + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.addFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.add(statement); + return true; + } + } + + private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + + /** + * Auxiliary class to process {@link Statement}s when removed from the knowledge + * base. Returns true if a statement was removed successfully. 
+ * + * @author Irina Dragoste + * + */ + private class RemoveStatementVisitor implements StatementVisitor { + + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.removeFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.remove(statement); + return true; + } + } + + private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); + + private class ExtractStatementsVisitor implements StatementVisitor { + + final ArrayList extracted = new ArrayList<>(); + final Class ownType; + + ExtractStatementsVisitor(final Class type) { + this.ownType = type; + } + + ArrayList getExtractedStatements() { + return this.extracted; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Fact statement) { + if (this.ownType.equals(Fact.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Rule statement) { + if (this.ownType.equals(Rule.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final DataSourceDeclaration statement) { + if (this.ownType.equals(DataSourceDeclaration.class)) { + this.extracted.add((T) statement); + } + return null; + } + } + + /** + * The primary storage for the contents of the knowledge base. + */ + private final LinkedHashSet statements = new LinkedHashSet<>(); + +// TODO support prefixes +// /** +// * Known prefixes that can be used to pretty-print the contents of the knowledge +// * base. We try to preserve user-provided prefixes found in files when loading +// * data. +// */ +// PrefixDeclarations prefixDeclarations; + + /** + * Index structure that organises all facts by their predicate. 
+ */ + private final Map> factsByPredicate = new HashMap<>(); + + /** + * Index structure that holds all data source declarations of this knowledge + * base. + */ + private final Set dataSourceDeclarations = new HashSet<>(); + + /** + * Registers a listener for changes on the knowledge base + * + * @param listener + */ + public void addListener(final KnowledgeBaseListener listener) { + this.listeners.add(listener); + } + + /** + * Unregisters given listener from changes on the knowledge base + * + * @param listener + */ + public void deleteListener(final KnowledgeBaseListener listener) { + this.listeners.remove(listener); + + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + */ + public void addStatement(final Statement statement) { + if (this.doAddStatement(statement)) { + this.notifyListenersOnStatementAdded(statement); + } + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + * @return true, if the knowledge base has changed. + */ + boolean doAddStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); + return true; + } + return false; + } + + /** + * Adds a collection of statements to the knowledge base. + * + * @param statements the statements to be added + */ + public void addStatements(final Collection statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Adds a list of statements to the knowledge base. + * + * @param statements the statements to be added + */ + public void addStatements(final Statement... 
statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + */ + public void removeStatement(final Statement statement) { + if (this.doRemoveStatement(statement)) { + this.notifyListenersOnStatementRemoved(statement); + } + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + * @return true, if the knowledge base has changed. + */ + boolean doRemoveStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + + if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { + this.statements.remove(statement); + return true; + } + return false; + } + + /** + * Removes a collection of statements to the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Collection statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + /** + * Removes a list of statements from the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Statement... 
statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + private void notifyListenersOnStatementAdded(final Statement addedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementAdded(addedStatement); + } + } + + private void notifyListenersOnStatementsAdded(final List addedStatements) { + if (!addedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } + } + } + + private void notifyListenersOnStatementRemoved(final Statement removedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementRemoved(removedStatement); + } + } + + private void notifyListenersOnStatementsRemoved(final List removedStatements) { + if (!removedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsRemoved(removedStatements); + } + } + } + + /** + * Get the list of all rules that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete rules. + * + * @return list of {@link Rule}s + */ + public List getRules() { + return this.getStatementsByType(Rule.class); + } + + /** + * Get the list of all facts that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete facts. + * + * @return list of {@link Fact}s + */ + public List getFacts() { + return this.getStatementsByType(Fact.class); + } + + /** + * Get the list of all data source declarations that have been added to the + * knowledge base. The list is read-only and cannot be modified to add or delete + * facts. 
+ * + * @return list of {@link DataSourceDeclaration}s + */ + public List getDataSourceDeclarations() { + return this.getStatementsByType(DataSourceDeclaration.class); + } + + List getStatementsByType(final Class type) { + final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); + for (final Statement statement : this.statements) { + statement.accept(visitor); + } + return Collections.unmodifiableList(visitor.getExtractedStatements()); + } + + /** + * Add a single fact to the internal data structures. It is assumed that it has + * already been checked that this fact is not present yet. + * + * @param fact the fact to add + */ + void addFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); + this.factsByPredicate.get(predicate).add(fact); + } + + /** + * Removes a single fact from the internal data structure. It is assumed that it + * has already been checked that this fact is already present. + * + * @param fact the fact to remove + */ + void removeFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + final Set facts = this.factsByPredicate.get(predicate); + facts.remove(fact); + if (facts.isEmpty()) { + this.factsByPredicate.remove(predicate); + } + } + + /** + * Returns all {@link Statement}s of this knowledge base. + * + * The result can be iterated over and will return statements in the original + * order. The collection is read-only and cannot be modified to add or delete + * statements. 
+ * + * @return a collection of statements + */ + public Collection getStatements() { + return Collections.unmodifiableCollection(this.statements); + } + + @Override + public Iterator iterator() { + return Collections.unmodifiableCollection(this.statements).iterator(); + } + + Map> getFactsByPredicate() { + return this.factsByPredicate; + } + +} \ No newline at end of file diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 1da67d694..cf80c90d3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; import java.io.IOException; +import java.io.OutputStream; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; @@ -86,6 +87,10 @@ static Reasoner getInstance() { */ KnowledgeBase getKnowledgeBase(); + void getKbFacts(OutputStream stream) throws IOException; + + void getKbFacts(String filePath) throws IOException; + /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
* If no algorithm is set, the default algorithm is diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 260db93e2..e37dd9612 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,6 +1,8 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +import java.io.FileOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; @@ -20,6 +22,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -808,4 +811,51 @@ ReasonerState getReasonerState() { void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } + + @Override + public void getKbFacts(OutputStream stream) throws IOException { + // TODO Auto-generated method stub + HashSet headLiterals = new HashSet(); + for (Rule rule : this.knowledgeBase.getRules()) { + for (PositiveLiteral positiveLiteral : rule.getHead()) { + headLiterals.add(positiveLiteral); + } + } + for (PositiveLiteral positiveliteral : headLiterals) { + try (final QueryResultIterator queryAnswers = this.answerQuery(positiveliteral, true)) { + while (queryAnswers.hasNext()) { + QueryResult queryAnswer = queryAnswers.next(); + stream.write((positiveliteral.getPredicate().getName() + + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + 
"\n") + .getBytes()); + } + } + } + stream.close(); + + } + + @Override + public void getKbFacts(String filePath) throws IOException { + // TODO Auto-generated method stub + OutputStream stream = new FileOutputStream(filePath); + HashSet headLiterals = new HashSet(); + for (Rule rule : this.knowledgeBase.getRules()) { + for (PositiveLiteral positiveLiteral : rule.getHead()) { + headLiterals.add(positiveLiteral); + } + } + for (PositiveLiteral positiveliteral : headLiterals) { + try (final QueryResultIterator queryAnswers = this.answerQuery(positiveliteral, true)) { + while (queryAnswers.hasNext()) { + QueryResult queryAnswer = queryAnswers.next(); + stream.write((positiveliteral.getPredicate().getName() + + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + "\n") + .getBytes()); + } + } + } + stream.close(); + } + } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java index 5b5875a63..b269047b7 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java @@ -78,7 +78,6 @@ public static void main(final String[] args) throws IOException { try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - /* Execute some queries */ ExamplesUtils.printOutQueryAnswers("address(?Org, ?Street, ?ZIP, ?City)", reasoner); ExamplesUtils.printOutQueryAnswers("locatedIn(?place, europe)", reasoner); From a9c9df8a52eef17c31bcb602ffafde14caba5469 Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 8 Jan 2020 13:17:59 +0100 Subject: [PATCH 0715/1255] added javadoc and methods for facts --- .../vlog4j/core/reasoner/Reasoner.java | 12 +++++++++++ .../reasoner/implementation/VLogReasoner.java | 21 ++----------------- 2 files changed, 14 insertions(+), 19 deletions(-) diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index cf80c90d3..f04b197ea 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -87,8 +87,20 @@ static Reasoner getInstance() { */ KnowledgeBase getKnowledgeBase(); + /** + * Exports all the facts of the knowledge base to an OutputStream. This includes + * the added facts and the inferred facts as well. + * + * @param an OutpumStream for the facts to be written to. + */ void getKbFacts(OutputStream stream) throws IOException; + /** + * Exports all the facts of the knowledge base to a desired file. This includes + * the added facts and the inferred facts as well. + * + * @param a String of the file path for the facts to be written to. + */ void getKbFacts(String filePath) throws IOException; /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index e37dd9612..e6b75b2b4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -839,23 +839,6 @@ public void getKbFacts(OutputStream stream) throws IOException { public void getKbFacts(String filePath) throws IOException { // TODO Auto-generated method stub OutputStream stream = new FileOutputStream(filePath); - HashSet headLiterals = new HashSet(); - for (Rule rule : this.knowledgeBase.getRules()) { - for (PositiveLiteral positiveLiteral : rule.getHead()) { - headLiterals.add(positiveLiteral); - } - } - for (PositiveLiteral positiveliteral : headLiterals) { - try (final QueryResultIterator queryAnswers = 
this.answerQuery(positiveliteral, true)) { - while (queryAnswers.hasNext()) { - QueryResult queryAnswer = queryAnswers.next(); - stream.write((positiveliteral.getPredicate().getName() - + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + "\n") - .getBytes()); - } - } - } - stream.close(); + getKbFacts(stream); } - -} +} \ No newline at end of file From e4806c3791889b09cffa913710aafff1f562062d Mon Sep 17 00:00:00 2001 From: alloka Date: Wed, 8 Jan 2020 13:22:23 +0100 Subject: [PATCH 0716/1255] added new line --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index e6b75b2b4..b5dc3c20b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -841,4 +841,4 @@ public void getKbFacts(String filePath) throws IOException { OutputStream stream = new FileOutputStream(filePath); getKbFacts(stream); } -} \ No newline at end of file +} From ce698ffb0f1ed77a62bc2450cbd01c04e09bc55f Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 9 Jan 2020 17:08:32 +0100 Subject: [PATCH 0717/1255] fixed javadoc --- .../semanticweb/vlog4j/core/reasoner/Reasoner.java | 12 +++++------- .../core/reasoner/implementation/VLogReasoner.java | 8 +++----- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index f04b197ea..65180c59f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,20 +88,18 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the facts of the knowledge base to an OutputStream. This includes - * the added facts and the inferred facts as well. + * Exports the inferred facts of the knowledge base to an OutputStream. * - * @param an OutpumStream for the facts to be written to. + * @param an OutputStream for the facts to be written to. */ - void getKbFacts(OutputStream stream) throws IOException; + void writeInferredFacts(OutputStream stream) throws IOException; /** - * Exports all the facts of the knowledge base to a desired file. This includes - * the added facts and the inferred facts as well. + * Exports the inferred facts of the knowledge base to a desired file. * * @param a String of the file path for the facts to be written to. */ - void getKbFacts(String filePath) throws IOException; + void writeInferredFacts(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b5dc3c20b..53eb1f706 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -813,8 +813,7 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void getKbFacts(OutputStream stream) throws IOException { - // TODO Auto-generated method stub + public void writeInferredFacts(OutputStream stream) throws IOException { HashSet headLiterals = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (PositiveLiteral positiveLiteral : rule.getHead()) { @@ -836,9 +835,8 @@ public void getKbFacts(OutputStream stream) throws IOException { } @Override - public void getKbFacts(String filePath) throws IOException { - // TODO Auto-generated method stub + public void writeInferredFacts(String filePath) throws IOException { OutputStream stream = new FileOutputStream(filePath); - getKbFacts(stream); + writeInferredFacts(stream); } } From 2c2c82f35d9e9a4341139a15a67e49b3bd709d00 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 18 Jan 2020 15:25:53 +0100 Subject: [PATCH 0718/1255] added some changes regarding method and serializer --- pom.xml | 153 +++++++++--------- .../core/model/implementation/Serializer.java | 8 +- .../reasoner/implementation/VLogReasoner.java | 33 ++-- vlog4j-examples/allola.txt | 23 +++ 4 files changed, 126 insertions(+), 91 deletions(-) create mode 100644 vlog4j-examples/allola.txt diff --git a/pom.xml b/pom.xml index 95ee3ad64..c521ee208 100644 --- a/pom.xml +++ b/pom.xml @@ -15,9 +15,8 @@ https://github.com/knowsys/vlog4j - + vlog4j-core vlog4j-rdf vlog4j-examples @@ -141,7 +140,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -205,9 +204,9 @@ - 
org.jacoco - jacoco-maven-plugin - 0.8.5 + org.jacoco + jacoco-maven-plugin + 0.8.5 @@ -217,7 +216,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -228,82 +227,80 @@ - org.apache.maven.plugins - maven-surefire-plugin - 3.0.0-M4 - - ${surefireArgLine} - 1 - true - + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M4 + + ${surefireArgLine} + 1 + true + - - org.eluder.coveralls - coveralls-maven-plugin - 4.3.0 - - - coverage/target/site/jacoco-aggregate/jacoco.xml - - - - - - javax.xml.bind - jaxb-api - 2.3.1 - - + + org.eluder.coveralls + coveralls-maven-plugin + 4.3.0 + + + coverage/target/site/jacoco-aggregate/jacoco.xml + + + + + + javax.xml.bind + jaxb-api + 2.3.1 + + - - org.jacoco - jacoco-maven-plugin - - - prepare-agent - - prepare-agent - - - surefireArgLine - - - - default-cli - - report - - test - - - ${project.reporting.outputDirectory}/jacoco-ut - - - - - - - **/javacc/JavaCCParser.class - **/javacc/JavaCCParserConstants.class - **/javacc/JavaCCParserTokenManager.class - **/javacc/JavaCharStream.class - **/javacc/ParseException.class - **/javacc/SimpleCharStream.class - **/javacc/Token.class - **/javacc/TokenMgrError.class - - + + org.jacoco + jacoco-maven-plugin + + + prepare-agent + + prepare-agent + + + surefireArgLine + + + + default-cli + + report + + test + + + ${project.reporting.outputDirectory}/jacoco-ut + + + + + + + **/javacc/JavaCCParser.class + **/javacc/JavaCCParserConstants.class + **/javacc/JavaCCParserTokenManager.class + **/javacc/JavaCharStream.class + **/javacc/ParseException.class + **/javacc/SimpleCharStream.class + **/javacc/Token.class + **/javacc/TokenMgrError.class + + - + org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 0e37fd672..5fd525b96 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.core.model.implementation; +import java.util.List; + import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -384,9 +386,13 @@ private static String addQuotes(final String string) { return QUOTE + string + QUOTE; } - private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } + public static String getFactOutput(Predicate predicate, List terms) { + return predicate.getName() + terms.toString().replace(OPENING_BRACKET, OPENING_PARENTHESIS) + .replace(CLOSING_BRACKET, CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + } + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 53eb1f706..478464216 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -22,15 +22,16 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; import 
org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; @@ -814,21 +815,29 @@ void setReasonerState(ReasonerState reasonerState) { @Override public void writeInferredFacts(OutputStream stream) throws IOException { - HashSet headLiterals = new HashSet(); + HashSet toBeQueriedHeadPredicates = new HashSet(); + for (Fact fact : this.knowledgeBase.getFacts()) { + stream.write((fact.toString() + "\n").getBytes()); + } for (Rule rule : this.knowledgeBase.getRules()) { - for (PositiveLiteral positiveLiteral : rule.getHead()) { - headLiterals.add(positiveLiteral); + for (Literal literal : rule.getHead()) { + toBeQueriedHeadPredicates.add(literal.getPredicate()); } } - for (PositiveLiteral positiveliteral : headLiterals) { - try (final QueryResultIterator queryAnswers = this.answerQuery(positiveliteral, true)) { - while (queryAnswers.hasNext()) { - QueryResult queryAnswer = queryAnswers.next(); - stream.write((positiveliteral.getPredicate().getName() - + queryAnswer.getTerms().toString().replace("[", "(").replace("]", ").") + "\n") - .getBytes()); - } + for (Predicate predicate : toBeQueriedHeadPredicates) { + ArrayList tobeGroundedVariables = new ArrayList(); + for (int i = 0; i < predicate.getArity(); i++) { + tobeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } + final QueryResultIterator answers = this + .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true); + answers.forEachRemaining(queryAnswer -> { + try { + stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); + } catch (IOException e) { + e.printStackTrace(); + } + }); } stream.close(); diff --git 
a/vlog4j-examples/allola.txt b/vlog4j-examples/allola.txt new file mode 100644 index 000000000..92837f326 --- /dev/null +++ b/vlog4j-examples/allola.txt @@ -0,0 +1,23 @@ +location(germany, europe) . +location(uk, europe) . +location(saxony, germany) . +location(dresden, saxony) . +city(dresden) . +country(germany) . +country(uk) . +university(tudresden, germany) . +university(uoxford, uk) . +streetAddress(tudresden, "Mommsenstraße 9", "01069", "Dresden") . +zipLocation("01069", dresden) . +locatedIn(germany, europe) . +locatedIn(uk, europe) . +locatedIn(dresden, saxony) . +locatedIn(saxony, germany) . +locatedIn(dresden, germany) . +locatedIn(dresden, europe) . +locatedIn(saxony, europe) . +locatedIn(_2_4_0, uk) . +locatedIn(_2_4_0, europe) . +address(tudresden, "Mommsenstraße 9", "01069", dresden) . +address(uoxford, _2_2_0, _2_3_0, _2_4_0) . +inEuropeOutsideGermany(uoxford) . From 5059dc161aa9341d379f3696781edb84f4827cd1 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 18 Jan 2020 15:26:33 +0100 Subject: [PATCH 0719/1255] added some changes regarding method and serializer --- vlog4j-examples/allola.txt | 23 ----------------------- 1 file changed, 23 deletions(-) delete mode 100644 vlog4j-examples/allola.txt diff --git a/vlog4j-examples/allola.txt b/vlog4j-examples/allola.txt deleted file mode 100644 index 92837f326..000000000 --- a/vlog4j-examples/allola.txt +++ /dev/null @@ -1,23 +0,0 @@ -location(germany, europe) . -location(uk, europe) . -location(saxony, germany) . -location(dresden, saxony) . -city(dresden) . -country(germany) . -country(uk) . -university(tudresden, germany) . -university(uoxford, uk) . -streetAddress(tudresden, "Mommsenstraße 9", "01069", "Dresden") . -zipLocation("01069", dresden) . -locatedIn(germany, europe) . -locatedIn(uk, europe) . -locatedIn(dresden, saxony) . -locatedIn(saxony, germany) . -locatedIn(dresden, germany) . -locatedIn(dresden, europe) . -locatedIn(saxony, europe) . -locatedIn(_2_4_0, uk) . -locatedIn(_2_4_0, europe) . 
-address(tudresden, "Mommsenstraße 9", "01069", dresden) . -address(uoxford, _2_2_0, _2_3_0, _2_4_0) . -inEuropeOutsideGermany(uoxford) . From bd20d6418b4346aa0098230ba03139f6f1486b8b Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 24 Jan 2020 02:38:28 +0100 Subject: [PATCH 0720/1255] added unit test --- .../reasoner/implementation/VLogReasoner.java | 12 +- .../vlog4j/syntax/parser/RuleParserTest.java | 104 +++++++++++++++++- 2 files changed, 106 insertions(+), 10 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 478464216..7992dafb5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -816,14 +816,15 @@ void setReasonerState(ReasonerState reasonerState) { @Override public void writeInferredFacts(OutputStream stream) throws IOException { HashSet toBeQueriedHeadPredicates = new HashSet(); - for (Fact fact : this.knowledgeBase.getFacts()) { - stream.write((fact.toString() + "\n").getBytes()); - } for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); } } + for (Fact fact : this.knowledgeBase.getFacts()) { + if (!toBeQueriedHeadPredicates.contains(fact.getPredicate())) + stream.write((fact.toString() + "\n").getBytes()); + } for (Predicate predicate : toBeQueriedHeadPredicates) { ArrayList tobeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { @@ -834,13 +835,14 @@ public void writeInferredFacts(OutputStream stream) throws IOException { answers.forEachRemaining(queryAnswer -> { try { stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); - } catch (IOException e) { + } + + catch 
(IOException e) { e.printStackTrace(); } }); } stream.close(); - } @Override diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 971202610..6a62099eb 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,27 +19,43 @@ * limitations under the License. * #L% */ - -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileOutputStream; +import java.io.FileReader; +import java.io.IOException; +import java.io.OutputStream; +import java.net.URL; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import org.junit.Test; import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; +import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; +import 
org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -438,4 +454,82 @@ public void testCustomDatatype() throws ParsingException { DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; assertEquals(constant, result); } + + @Test + public void testGetFacts() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + final DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + final Predicate predicate1 = Expressions.makePredicate("country", 1); + final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); + final DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", + "?var wdt:P31 wd:Q5 ."); + final Predicate predicate2 = Expressions.makePredicate("inEuropeOutsideGermany", 1); + final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); + final DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", + "?var2 wdt:P31 wd:Q5 ."); + final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); + final Predicate predicate4 = Expressions.makePredicate("city", 1); + final DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); + 
RuleParser.parseInto(kb, fact.toString() + "."); + RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); + RuleParser.parseInto(kb, dataSourceDeclaration4.toString()); + final String rules = "location(germany,europe). \n" // + + "location(saxony,germany). \n" // + + "location(dresden,saxony). \n" // + + "locatedIn(Egypt,Africa). \n" // + + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + + "city(dresden). \n" // + + "country(germany). \n" // + + "university(tudresden, germany). \n" // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // + + "zipLocation(\"01069\", dresden) . \n" // + + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . \n" + + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n"; + RuleParser.parseInto(kb, rules); + final String facts = "location(germany,europe). \n" // + + "location(saxony,germany). \n" // + + "location(dresden,saxony). \n" // + + "location(germany, europe) . \n" // + + "locatedIn(Egypt, Africa). \n" // + + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + + "city(dresden). \n" // + + "country(germany). \n" // + + "university(tudresden, germany). \n" // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // + + "zipLocation(\"01069\", dresden) . \n" // + + "locatedIn(germany, europe) . \n" // + + "locatedIn(dresden, saxony) . \n" // + + "locatedIn(saxony, germany) . \n" // + + "locatedIn(dresden, germany) . \n" // + + "locatedIn(dresden, europe) . \n" // + + "locatedIn(saxony, europe) . \n" // + + "address(tudresden, \"Mommsenstraße 9\", \"01069\", dresden) . \n" + + "() . 
\n"; + KnowledgeBase kb2 = new KnowledgeBase(); + KnowledgeBase kb3 = new KnowledgeBase(); + RuleParser.parseInto(kb2, facts); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + File file = new File("test.txt"); + OutputStream stream = new FileOutputStream(file); + reasoner.writeInferredFacts(stream); + stream.flush(); + BufferedReader input = new BufferedReader(new FileReader(file)); + String factString = ""; + while ((factString = input.readLine()) != null) { + if (!factString.contains("_")) + RuleParser.parseInto(kb3, factString); + } + input.close(); + assertEquals(new HashSet(kb2.getFacts()), new HashSet(kb3.getFacts())); + file.delete(); + + } + + } } From 49830490a745122612b5ab1c540f262422812c2a Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 12:00:48 +0100 Subject: [PATCH 0721/1255] rename file --- .../implementation/ExtensionSizeTest.java | 206 ------------------ ...SizeTest.java => QueryAnswerSizeTest.java} | 0 2 files changed, 206 deletions(-) delete mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QuerySizeTest.java => QueryAnswerSizeTest.java} (100%) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java deleted file mode 100644 index 8e16694b0..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExtensionSizeTest.java +++ /dev/null @@ -1,206 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; - -import java.io.IOException; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; - -public class ExtensionSizeTest { - - private static final Predicate predP = Expressions.makePredicate("P", 1); - private static final Predicate predQ = Expressions.makePredicate("Q", 1); - private static final Predicate predR = Expressions.makePredicate("R", 2); - private static final Variable x = Expressions.makeUniversalVariable("x"); - private static final Variable y = Expressions.makeExistentialVariable("y"); - private static final Constant c = Expressions.makeAbstractConstant("c"); - private static final Constant d = Expressions.makeAbstractConstant("d"); - private static final Constant e = Expressions.makeAbstractConstant("e"); - private static final Constant f = Expressions.makeAbstractConstant("f"); - - private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); - private static final PositiveLiteral Qx = 
Expressions.makePositiveLiteral(predQ, x); - private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); - private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); - private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); - private static final Conjunction conPx = Expressions.makeConjunction(Px); - - private static final Rule QxPx = Expressions.makeRule(Qx, Px); - private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); - - private static final Fact factPc = Expressions.makeFact(predP, c); - private static final Fact factPd = Expressions.makeFact(predP, d); - - private static final Fact factQe = Expressions.makeFact(predQ, e); - private static final Fact factQf = Expressions.makeFact(predQ, f); - - private static final PositiveLiteral Rdy = Expressions.makePositiveLiteral(predR, d, y); - private static final PositiveLiteral Rxd = Expressions.makePositiveLiteral(predR, x, d); - private static final PositiveLiteral Rxe = Expressions.makePositiveLiteral(predR, x, e); - - @Test - public void noFactsnoRules() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - } - } - - @Test - public void noFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(QxPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - assertEquals(0, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void noFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - assertEquals(0, 
reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsNoRules() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(0, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, QxPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void qFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factQe, factQf, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void qFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factQe, factQf, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(0, reasoner.getExtensionSize(Px)); - 
assertEquals(2, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsQFactsUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, QxPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(4, reasoner.getExtensionSize(Qx)); - assertEquals(0, reasoner.getExtensionSize(Rxy)); - } - } - - @Test - public void pFactsQFactsExistentialRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(4, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Rxy)); - - assertEquals(2, reasoner.getExtensionSize(Rdy)); - assertEquals(2, reasoner.getExtensionSize(Rxe)); - } - } - - @Test - public void pFactsQFactsExistentialAndUniversalRule() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - assertEquals(2, reasoner.getExtensionSize(Px)); - assertEquals(6, reasoner.getExtensionSize(Qx)); - assertEquals(2, reasoner.getExtensionSize(Rxy)); - - assertEquals(2, reasoner.getExtensionSize(Rdy)); - assertEquals(2, reasoner.getExtensionSize(Rxd)); - } - } -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java similarity index 100% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QuerySizeTest.java rename to 
vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java From aa58e430663726a04584adf6c6f52925b3eb87e5 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 12:01:06 +0100 Subject: [PATCH 0722/1255] delete unused import --- .../main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java index 906d3c368..3e4f13e69 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Iterator; import java.util.List; import org.apache.log4j.ConsoleAppender; From b6cea1e44767de6c29959fbeb55a45c491991925 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 12:15:31 +0100 Subject: [PATCH 0723/1255] rename method; update javadoc --- .../vlog4j/core/reasoner/Reasoner.java | 56 ++----------------- .../reasoner/implementation/VLogReasoner.java | 16 ------ 2 files changed, 6 insertions(+), 66 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index e37561483..67de43fcf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -330,60 +330,19 @@ public static Reasoner getInstance() { */ QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); - // TODO add examples to query javadoc - /** - * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the explicit facts 
materialised by the reasoner, - * including nulls.
            - * An answer to the query is the terms a fact that matches the {@code query}: - * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
            - * A query answer is represented by a {@link QueryResult}. A query can have - * multiple, distinct query answers. This method returns an Iterator over these - * answers.
            - * - * Depending on the state of the reasoning (materialisation) and its - * {@link KnowledgeBase}, the answers can have a different {@link Correctness} - * ({@link QueryResultIterator#getCorrectness()}): - *
              - *
            • If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current - * knowledge base has completed, and the query answers are guaranteed to be - * correct.
            • - *
            • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed - * to be sound, but may be incomplete. This can happen - *
                - *
              • when materialisation has not completed ({@link Reasoner#reason()} returns - * {@code false}),
              • - *
              • or when the knowledge base was modified after reasoning, and the - * materialisation does not reflect the current knowledge base. - * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain - * complete query answers with respect to the current knowledge base.
              • - *
              - *
            • - *
            • If {@link Correctness#INCORRECT}, the results may be incomplete, and some - * results may be unsound. This can happen when the knowledge base was modified - * and the reasoner materialisation is no longer consistent with the current - * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, - * in order to obtain correct query answers. - *
            - * - * + /* * @param query a {@link PositiveLiteral} representing the query to be answered. - * @return number of facts in the extension of the query. + * + * @return queryAnswerSize(query, true), the number of facts in the extension of + * the query. */ long queryAnswerSize(PositiveLiteral query); - // TODO add javadoc, examples - long getExtensionSize(PositiveLiteral literal); - // TODO add examples to query javadoc /** * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the explicit facts materialised by the reasoner. + * loaded into the reasoner and the number of explicit facts materialised by + * the reasoner. *
            * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the @@ -392,9 +351,6 @@ public static Reasoner getInstance() { * the {@code query} are matched by terms in the fact, either named * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The * same variable name identifies the same term in the answer fact.
            - * A query answer is represented by a {@link QueryResult}. A query can have - * multiple, distinct query answers. This method returns an Iterator over these - * answers.
            * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index cb0b75f75..f6c6d2ca2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -621,22 +621,6 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { return result; } - @Override - public long getExtensionSize(PositiveLiteral literal) { - validateNotClosed(); - validateKBLoaded("Querying is not alowed before reasoner is loaded!"); - - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(literal); - - long result = 0; - try { - result = this.vLog.getExtensionSize(this.vLog.getPredicateId(vLogAtom.getPredicate())); - } catch (NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - return result; - } - @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { From 4d8626809f5c738bb611de39063c94baa81fca48 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Mon, 27 Jan 2020 15:44:43 +0100 Subject: [PATCH 0724/1255] add test --- .../implementation/QueryAnswerSizeTest.java | 37 ++++++++++++++++--- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index 0c2699f32..b320f1e34 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -36,7 +36,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class QuerySizeTest { +public class QueryAnswerSizeTest { private static final Predicate predP = Expressions.makePredicate("P", 1); private static final Predicate predQ = Expressions.makePredicate("Q", 1); @@ -51,12 +51,17 @@ public class QuerySizeTest { private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x); private static final PositiveLiteral Qx = Expressions.makePositiveLiteral(predQ, x); private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y); + private static final PositiveLiteral Rxx = Expressions.makePositiveLiteral(predR, x, x); private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y); + private static final PositiveLiteral Ryy = Expressions.makePositiveLiteral(predR, y, y); + private static final Conjunction conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy); + private static final Conjunction conRxxRxyRyy = Expressions.makePositiveConjunction(Rxx, Rxy, Ryy); private static final Conjunction conPx = Expressions.makeConjunction(Px); - private static final Rule ruleQxPx = Expressions.makeRule(Qx, Px); + private static final Rule QxPx = Expressions.makeRule(Qx, Px); private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx); + private static final Rule RxxRxyRyyPx = Expressions.makeRule(conRxxRxyRyy, conPx); private static final Fact factPc = Expressions.makeFact(predP, c); private static final Fact factPd = Expressions.makeFact(predP, d); @@ -83,7 +88,7 @@ public void noFactsnoRules() throws IOException { @Test public void noFactsUniversalRule() throws IOException { final KnowledgeBase kb = 
new KnowledgeBase(); - kb.addStatement(ruleQxPx); + kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(0, reasoner.queryAnswerSize(Px, true)); @@ -119,7 +124,7 @@ public void pFactsNoRules() throws IOException { @Test public void pFactsUniversalRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, ruleQxPx); + kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(2, reasoner.queryAnswerSize(Px, true)); @@ -188,7 +193,7 @@ public void qFactsExistentialRule() throws IOException { @Test public void pFactsQFactsUniversalRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(2, reasoner.queryAnswerSize(Px)); @@ -234,7 +239,7 @@ public void pFactsQFactsExistentialRule() throws IOException { @Test public void pFactsQFactsExistentialAndUniversalRule() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(factPc, factPd, factQe, factQf, ruleQxPx, RxyQyPx); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); assertEquals(2, reasoner.queryAnswerSize(Px)); @@ -259,4 +264,24 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { } } + @Test + public void pFactsLiteralWithSameVariables() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxxRxyRyyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.queryAnswerSize(Px, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, false)); + + assertEquals(4, reasoner.queryAnswerSize(Rxx, 
true)); + assertEquals(2, reasoner.queryAnswerSize(Rxx, false)); + + assertEquals(6, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(2, reasoner.queryAnswerSize(Rxy, false)); + + assertEquals(4, reasoner.queryAnswerSize(Ryy, true)); + assertEquals(2, reasoner.queryAnswerSize(Ryy, false)); + + } + } } From e4fdbfa41dcdabe1a86ba7647e9044a0fe2dbefd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Larry=20Gonz=C3=A1lez?= Date: Mon, 27 Jan 2020 16:36:33 +0100 Subject: [PATCH 0725/1255] fix typo --- .../org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index f3b3129c2..20a82119b 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -73,7 +73,7 @@ public static void main(final String[] args) throws ParsingException, IOExceptio + "@prefix wdqs: ." // + "@prefix dbp: ." // + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') ." // - + "@source wdResult[2]) : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') ." // + "% Rules:\n" // + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage)." // + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage)." 
// From 52a86d9b5a34693557830afacab0084d9903d1d0 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 30 Jan 2020 17:44:46 +0100 Subject: [PATCH 0726/1255] added some changes --- .../core/model/implementation/Serializer.java | 6 ++-- .../vlog4j/core/reasoner/Reasoner.java | 8 ++--- .../reasoner/implementation/VLogReasoner.java | 35 ++++++++++--------- .../vlog4j/syntax/parser/RuleParserTest.java | 2 +- 4 files changed, 27 insertions(+), 24 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 5fd525b96..a089d9815 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -391,8 +391,10 @@ private static String addAngleBrackets(final String string) { } public static String getFactOutput(Predicate predicate, List terms) { - return predicate.getName() + terms.toString().replace(OPENING_BRACKET, OPENING_PARENTHESIS) - .replace(CLOSING_BRACKET, CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + return getIRIString(predicate.getName()) + + terms.toString().replace(terms.toString().substring(0, 1), OPENING_PARENTHESIS).replace( + terms.toString().substring(terms.toString().length() - 1, terms.toString().length()), + CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 65180c59f..b69847a2c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,18 +88,18 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports the inferred facts of the knowledge base 
to an OutputStream. + * Exports all the facts of the knowledge base to an OutputStream. * * @param an OutputStream for the facts to be written to. */ - void writeInferredFacts(OutputStream stream) throws IOException; + void writeFacts(OutputStream stream) throws IOException; /** - * Exports the inferred facts of the knowledge base to a desired file. + * Exports all the facts of the knowledge base to a desired file. * * @param a String of the file path for the facts to be written to. */ - void writeInferredFacts(String filePath) throws IOException; + void writeFacts(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 7992dafb5..c4510263d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -814,8 +814,8 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void writeInferredFacts(OutputStream stream) throws IOException { - HashSet toBeQueriedHeadPredicates = new HashSet(); + public void writeFacts(OutputStream stream) throws IOException { + Set toBeQueriedHeadPredicates = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); @@ -826,28 +826,29 @@ public void writeInferredFacts(OutputStream stream) throws IOException { stream.write((fact.toString() + "\n").getBytes()); } for (Predicate predicate : toBeQueriedHeadPredicates) { - ArrayList tobeGroundedVariables = new ArrayList(); + List tobeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { 
tobeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } - final QueryResultIterator answers = this - .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true); - answers.forEachRemaining(queryAnswer -> { - try { - stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); - } + try (final QueryResultIterator answers = this + .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true)) { + answers.forEachRemaining(queryAnswer -> { + try { + stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); + } catch (IOException e) { + throw new RuntimeException(); + } + }); + + } - catch (IOException e) { - e.printStackTrace(); - } - }); } - stream.close(); } @Override - public void writeInferredFacts(String filePath) throws IOException { - OutputStream stream = new FileOutputStream(filePath); - writeInferredFacts(stream); + public void writeFacts(String filePath) throws IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + writeFacts(stream); + } } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 6a62099eb..2b7078e0d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -517,7 +517,7 @@ public void testGetFacts() throws ParsingException, IOException { reasoner.reason(); File file = new File("test.txt"); OutputStream stream = new FileOutputStream(file); - reasoner.writeInferredFacts(stream); + reasoner.writeFacts(stream); stream.flush(); BufferedReader input = new BufferedReader(new FileReader(file)); String factString = ""; From e3f34a4da5c5fd868e3269480c25fcc0b0c52c71 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 1 Feb 2020 23:31:43 +0100 
Subject: [PATCH 0727/1255] added facts from datasources --- .../reasoner/implementation/VLogReasoner.java | 4 ++ .../vlog4j/syntax/parser/RuleParserTest.java | 45 +++++-------------- 2 files changed, 16 insertions(+), 33 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index c4510263d..581e12a1b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -821,10 +821,14 @@ public void writeFacts(OutputStream stream) throws IOException { toBeQueriedHeadPredicates.add(literal.getPredicate()); } } + for (DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { + toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); + } for (Fact fact : this.knowledgeBase.getFacts()) { if (!toBeQueriedHeadPredicates.contains(fact.getPredicate())) stream.write((fact.toString() + "\n").getBytes()); } + for (Predicate predicate : toBeQueriedHeadPredicates) { List tobeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 2b7078e0d..4e8c55cb3 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -52,8 +52,10 @@ import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import 
org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; @@ -458,50 +460,27 @@ public void testCustomDatatype() throws ParsingException { @Test public void testGetFacts() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - final DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - final Predicate predicate1 = Expressions.makePredicate("country", 1); - final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1); - final DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var", - "?var wdt:P31 wd:Q5 ."); - final Predicate predicate2 = Expressions.makePredicate("inEuropeOutsideGermany", 1); - final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2); - final DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2", - "?var2 wdt:P31 wd:Q5 ."); - final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3); - final Predicate predicate4 = Expressions.makePredicate("city", 1); - final DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2); + final InMemoryDataSource locations = new InMemoryDataSource(2, 3); + locations.addTuple("germany", "europe"); + locations.addTuple("saxony", "germany"); + locations.addTuple("dresden", "saxony"); RuleParser.parseInto(kb, fact.toString() + "."); - 
RuleParser.parseInto(kb, dataSourceDeclaration1.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration2.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration3.toString()); - RuleParser.parseInto(kb, dataSourceDeclaration4.toString()); - final String rules = "location(germany,europe). \n" // - + "location(saxony,germany). \n" // - + "location(dresden,saxony). \n" // - + "locatedIn(Egypt,Africa). \n" // + final String sharedFacts = "locatedIn(Egypt,Africa). \n" // + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + "city(dresden). \n" // + "country(germany). \n" // + "university(tudresden, germany). \n" // + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // - + "zipLocation(\"01069\", dresden) . \n" // - + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + + "zipLocation(\"01069\", dresden) . \n"; + final String rules = sharedFacts + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . \n" + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n"; RuleParser.parseInto(kb, rules); - final String facts = "location(germany,europe). \n" // + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); + final String facts = sharedFacts + "location(dresden,saxony). \n" // + + "location(germany,europe). \n" // + "location(saxony,germany). \n" // - + "location(dresden,saxony). \n" // - + "location(germany, europe) . \n" // - + "locatedIn(Egypt, Africa). \n" // - + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // - + "city(dresden). \n" // - + "country(germany). \n" // - + "university(tudresden, germany). \n" // - + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // - + "zipLocation(\"01069\", dresden) . 
\n" // + "locatedIn(germany, europe) . \n" // + "locatedIn(dresden, saxony) . \n" // + "locatedIn(saxony, germany) . \n" // From 5ee1b2e2a4a6b7ef9b7c33a90b42c34aa5959627 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 1 Feb 2020 23:38:32 +0100 Subject: [PATCH 0728/1255] removed unused imports --- .../semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 6 ------ 1 file changed, 6 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 4e8c55cb3..b1f6c702b 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -29,7 +29,6 @@ import java.io.FileReader; import java.io.IOException; import java.io.OutputStream; -import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -38,13 +37,10 @@ import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Conjunction; import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; @@ -52,11 +48,9 @@ import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import 
org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; From 59f6bab8454a4dc0230dd84a6fdfa1786ab743e6 Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 7 Feb 2020 11:17:15 +0100 Subject: [PATCH 0729/1255] added some changes --- .../core/model/implementation/Serializer.java | 20 ++++++++++++++----- .../vlog4j/core/reasoner/Reasoner.java | 4 ++-- .../reasoner/implementation/VLogReasoner.java | 13 ++++++------ .../vlog4j/syntax/parser/RuleParserTest.java | 2 +- 4 files changed, 24 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a089d9815..41314fda7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -390,11 +390,21 @@ private static String addAngleBrackets(final String string) { return LESS_THAN + string + MORE_THAN; } - public static String getFactOutput(Predicate predicate, List terms) { - return getIRIString(predicate.getName()) - + terms.toString().replace(terms.toString().substring(0, 1), OPENING_PARENTHESIS).replace( - terms.toString().substring(terms.toString().length() - 1, terms.toString().length()), - CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + public static String 
getFactString(Predicate predicate, List terms) { + StringBuilder stringBuilder = new StringBuilder(""); + stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); + boolean first = true; + for (Term term : terms) { + if (first) { + first = false; + } else { + stringBuilder.append(COMMA); + } + final String string = term.getSyntacticRepresentation(); + stringBuilder.append(string); + } + stringBuilder.append(CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + return stringBuilder.toString(); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index b69847a2c..598fea327 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -92,14 +92,14 @@ static Reasoner getInstance() { * * @param an OutputStream for the facts to be written to. */ - void writeFacts(OutputStream stream) throws IOException; + void writeInferences(OutputStream stream) throws IOException; /** * Exports all the facts of the knowledge base to a desired file. * * @param a String of the file path for the facts to be written to. */ - void writeFacts(String filePath) throws IOException; + void writeInferences(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 581e12a1b..2323b2312 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -814,7 +814,7 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void writeFacts(OutputStream stream) throws IOException { + public void writeInferences(OutputStream stream) throws IOException { Set toBeQueriedHeadPredicates = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { @@ -825,8 +825,7 @@ public void writeFacts(OutputStream stream) throws IOException { toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); } for (Fact fact : this.knowledgeBase.getFacts()) { - if (!toBeQueriedHeadPredicates.contains(fact.getPredicate())) - stream.write((fact.toString() + "\n").getBytes()); + toBeQueriedHeadPredicates.add(fact.getPredicate()); } for (Predicate predicate : toBeQueriedHeadPredicates) { @@ -838,9 +837,9 @@ public void writeFacts(OutputStream stream) throws IOException { .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true)) { answers.forEachRemaining(queryAnswer -> { try { - stream.write(Serializer.getFactOutput(predicate, queryAnswer.getTerms()).getBytes()); + stream.write(Serializer.getFactString(predicate, queryAnswer.getTerms()).getBytes()); } catch (IOException e) { - throw new RuntimeException(); + throw new RuntimeException(e); } }); @@ -850,9 +849,9 @@ public void writeFacts(OutputStream stream) throws IOException { } @Override - public void writeFacts(String filePath) throws IOException { + public void writeInferences(String filePath) throws IOException { try (OutputStream stream = new 
FileOutputStream(filePath)) { - writeFacts(stream); + writeInferences(stream); } } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index b1f6c702b..35f34c29c 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -490,7 +490,7 @@ public void testGetFacts() throws ParsingException, IOException { reasoner.reason(); File file = new File("test.txt"); OutputStream stream = new FileOutputStream(file); - reasoner.writeFacts(stream); + reasoner.writeInferences(stream); stream.flush(); BufferedReader input = new BufferedReader(new FileReader(file)); String factString = ""; From 399dda1195ba0fcec657f49be1d4b785c260ee33 Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 7 Feb 2020 13:49:47 +0100 Subject: [PATCH 0730/1255] added changes in unit test --- .../vlog4j/syntax/parser/RuleParserTest.java | 57 ++++++------------- 1 file changed, 17 insertions(+), 40 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 35f34c29c..a10ef80bc 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -24,14 +24,13 @@ import static org.mockito.Mockito.mock; import java.io.BufferedReader; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileReader; +import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.OutputStream; +import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; +import java.util.List; import org.junit.Test; import 
org.mockito.ArgumentMatchers; @@ -452,56 +451,34 @@ public void testCustomDatatype() throws ParsingException { } @Test - public void testGetFacts() throws ParsingException, IOException { + public void testWriteInferences() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); - final InMemoryDataSource locations = new InMemoryDataSource(2, 3); - locations.addTuple("germany", "europe"); - locations.addTuple("saxony", "germany"); - locations.addTuple("dresden", "saxony"); + final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + locations.addTuple("dresden", "germany"); RuleParser.parseInto(kb, fact.toString() + "."); - final String sharedFacts = "locatedIn(Egypt,Africa). \n" // + final String rules = "locatedIn(Egypt,Africa). \n" // + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + "city(dresden). \n" // + "country(germany). \n" // + "university(tudresden, germany). \n" // - + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // - + "zipLocation(\"01069\", dresden) . \n"; - final String rules = sharedFacts + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // - + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // - + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . \n" + + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n"; RuleParser.parseInto(kb, rules); kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); - final String facts = sharedFacts + "location(dresden,saxony). \n" // - + "location(germany,europe). \n" // - + "location(saxony,germany). \n" // - + "locatedIn(germany, europe) . \n" // - + "locatedIn(dresden, saxony) . \n" // - + "locatedIn(saxony, germany) . \n" // - + "locatedIn(dresden, germany) . \n" // - + "locatedIn(dresden, europe) . 
\n" // - + "locatedIn(saxony, europe) . \n" // - + "address(tudresden, \"Mommsenstraße 9\", \"01069\", dresden) . \n" - + "() . \n"; - KnowledgeBase kb2 = new KnowledgeBase(); - KnowledgeBase kb3 = new KnowledgeBase(); - RuleParser.parseInto(kb2, facts); + List inferences = new ArrayList(); try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - File file = new File("test.txt"); - OutputStream stream = new FileOutputStream(file); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); reasoner.writeInferences(stream); stream.flush(); - BufferedReader input = new BufferedReader(new FileReader(file)); - String factString = ""; - while ((factString = input.readLine()) != null) { - if (!factString.contains("_")) - RuleParser.parseInto(kb3, factString); - } - input.close(); - assertEquals(new HashSet(kb2.getFacts()), new HashSet(kb3.getFacts())); - file.delete(); + try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { + String factString = ""; + while ((factString = input.readLine()) != null) { + inferences.add(factString); + } + } + assertEquals(10, inferences.size()); } } From b535a8c26b711c3b8c752c66a37e736e48174664 Mon Sep 17 00:00:00 2001 From: alloka Date: Fri, 7 Feb 2020 13:53:14 +0100 Subject: [PATCH 0731/1255] removed unused imports --- .../org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index a10ef80bc..abca5ffd7 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -29,7 +29,6 @@ import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashSet; import java.util.List; import org.junit.Test; From 
424a2850c25449fcaf83ad9a5a841af39fb7ea10 Mon Sep 17 00:00:00 2001 From: alloka Date: Sat, 8 Feb 2020 03:24:22 +0100 Subject: [PATCH 0732/1255] merged code in serializer --- .../core/model/implementation/Serializer.java | 22 +++++++------------ 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 41314fda7..e344d567e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -132,19 +132,7 @@ public static String getString(final Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - stringBuilder.append(getIRIString(literal.getPredicate().getName())).append(OPENING_PARENTHESIS); - boolean first = true; - for (final Term term : literal.getArguments()) { - if (first) { - first = false; - } else { - stringBuilder.append(COMMA); - } - final String string = term.getSyntacticRepresentation(); - stringBuilder.append(string); - } - stringBuilder.append(CLOSING_PARENTHESIS); - return stringBuilder.toString(); + return getTermsString(literal.getArguments(), stringBuilder, literal.getPredicate()); } /** @@ -392,6 +380,11 @@ private static String addAngleBrackets(final String string) { public static String getFactString(Predicate predicate, List terms) { StringBuilder stringBuilder = new StringBuilder(""); + return getTermsString(terms, stringBuilder, predicate) + STATEMENT_SEPARATOR + "\n"; + + } + + public static String getTermsString(List terms, StringBuilder stringBuilder, Predicate predicate) { stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); boolean first = true; for (Term term : terms) { @@ -403,8 +396,9 @@ public static String getFactString(Predicate 
predicate, List terms) { final String string = term.getSyntacticRepresentation(); stringBuilder.append(string); } - stringBuilder.append(CLOSING_PARENTHESIS + STATEMENT_SEPARATOR + "\n"); + stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); + } } From 54b68d1945133edca9047f5dfa2a6a3aeb3509fb Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 10 Feb 2020 17:41:15 +0100 Subject: [PATCH 0733/1255] added some changes --- .../core/model/implementation/Serializer.java | 9 +++++---- .../semanticweb/vlog4j/core/reasoner/Reasoner.java | 6 ++++-- .../core/reasoner/implementation/VLogReasoner.java | 13 ++++++++----- .../vlog4j/syntax/parser/RuleParserTest.java | 4 ++-- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index e344d567e..09b6f7981 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -132,7 +132,8 @@ public static String getString(final Literal literal) { if (literal.isNegated()) { stringBuilder.append(NEGATIVE_IDENTIFIER); } - return getTermsString(literal.getArguments(), stringBuilder, literal.getPredicate()); + stringBuilder.append(getString(literal.getPredicate(), literal.getArguments())); + return stringBuilder.toString(); } /** @@ -379,12 +380,12 @@ private static String addAngleBrackets(final String string) { } public static String getFactString(Predicate predicate, List terms) { - StringBuilder stringBuilder = new StringBuilder(""); - return getTermsString(terms, stringBuilder, predicate) + STATEMENT_SEPARATOR + "\n"; + return getString(predicate, terms) + STATEMENT_SEPARATOR + "\n"; } - public static String getTermsString(List terms, StringBuilder stringBuilder, Predicate predicate) { + 
public static String getString(Predicate predicate, List terms) { + StringBuilder stringBuilder = new StringBuilder(""); stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); boolean first = true; for (Term term : terms) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 598fea327..62fb60c8a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,14 +88,16 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the facts of the knowledge base to an OutputStream. + * Exports all the (explicit and implicit) facts of the knowledge base to an + * OutputStream. * * @param an OutputStream for the facts to be written to. */ void writeInferences(OutputStream stream) throws IOException; /** - * Exports all the facts of the knowledge base to a desired file. + * Exports all the (explicit and implicit) facts of the knowledge base to a + * desired file. * * @param a String of the file path for the facts to be written to. 
*/ diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 2323b2312..0ed188ff0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -22,6 +22,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -815,6 +816,7 @@ void setReasonerState(ReasonerState reasonerState) { @Override public void writeInferences(OutputStream stream) throws IOException { + QueryResult queryAnswer; Set toBeQueriedHeadPredicates = new HashSet(); for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { @@ -829,19 +831,20 @@ public void writeInferences(OutputStream stream) throws IOException { } for (Predicate predicate : toBeQueriedHeadPredicates) { - List tobeGroundedVariables = new ArrayList(); + List toBeGroundedVariables = new ArrayList(); for (int i = 0; i < predicate.getArity(); i++) { - tobeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); + toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } try (final QueryResultIterator answers = this - .answerQuery(Expressions.makePositiveLiteral(predicate, tobeGroundedVariables), true)) { - answers.forEachRemaining(queryAnswer -> { + .answerQuery(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables), true)) { + while (answers.hasNext()) { + queryAnswer = answers.next(); try { 
stream.write(Serializer.getFactString(predicate, queryAnswer.getTerms()).getBytes()); } catch (IOException e) { throw new RuntimeException(e); } - }); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index abca5ffd7..61353b99d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -68,7 +68,7 @@ public class RuleParserTest { private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - private final PositiveLiteral fact = Expressions.makePositiveLiteral("http://example.org/s", c); + private final Fact fact = Expressions.makeFact("http://example.org/s", c); private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); @@ -454,7 +454,7 @@ public void testWriteInferences() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); final InMemoryDataSource locations = new InMemoryDataSource(2, 1); locations.addTuple("dresden", "germany"); - RuleParser.parseInto(kb, fact.toString() + "."); + kb.addStatement(fact); final String rules = "locatedIn(Egypt,Africa). \n" // + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // + "city(dresden). 
\n" // From 717156fd5a2caa1047145684836c8da092d4e854 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 10 Feb 2020 19:13:35 +0100 Subject: [PATCH 0734/1255] refactoring: * moved vLog-specific data structures into new VLogKnowledgeBase class * moved abstract method getConfigurationString from DatSource to VLogDataSource --- .../vlog4j/core/model/api/DataSource.java | 8 - .../implementation/InMemoryDataSource.java | 21 +- .../implementation/VLogDataSource.java | 8 + .../implementation/VLogKnowledgeBase.java | 282 ++++++++++++++ .../reasoner/implementation/VLogReasoner.java | 355 ++++-------------- 5 files changed, 378 insertions(+), 296 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java index 504603d71..888d30f77 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java @@ -30,14 +30,6 @@ */ public interface DataSource extends Entity { - /** - * Constructs a String representation of the data source. - * - * @return a String representation of the data source configuration for a - * certain predicate. - */ - public String toConfigString(); - /** * Retrieve the required arity of target predicates for the data source. 
* diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 6a290f021..947b78078 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -48,8 +48,10 @@ public class InMemoryDataSource implements DataSource { * efficiency, the actual number of facts should exactly correspond to this * capacity. * - * @param arity the number of parameters in a fact from this source - * @param initialCapacity the planned number of facts + * @param arity + * the number of parameters in a fact from this source + * @param initialCapacity + * the planned number of facts */ public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; @@ -61,7 +63,8 @@ public InMemoryDataSource(final int arity, final int initialCapacity) { * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. * - * @param constantNames the string names of the constants in this fact + * @param constantNames + * the string names of the constants in this fact */ public void addTuple(final String... 
constantNames) { if (constantNames.length != this.arity) { @@ -97,7 +100,7 @@ public String[][] getData() { public String getSyntacticRepresentation() { final StringBuilder sb = new StringBuilder( "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); - for (int i = 0; i < this.getData().length; i++) { + for (int i = 0; i < getData().length; i++) { for (int j = 0; j < this.data[i].length; j++) { sb.append(this.data[i][j] + " "); } @@ -106,14 +109,4 @@ public String getSyntacticRepresentation() { return sb.toString(); } - /** - * Returns null to indicate that this {@link DataSource} cannot be passed to - * VLog in a configuration string. - */ - - @Override - public String toConfigString() { - return null; - } - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java index 04024d3d2..0cab0e979 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java @@ -33,4 +33,12 @@ public abstract class VLogDataSource implements DataSource { public static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; public static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; + /** + * Constructs a String representation of the data source. + * + * @return a String representation of the data source configuration for a + * certain predicate. 
+ */ + public abstract String toConfigString(); + } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java new file mode 100644 index 000000000..73f4adf18 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -0,0 +1,282 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +/** + * Class for organizing a Knowledge Base using vLog-specific data structures. 
+ * + * @author Irina Dragoste + * + */ +public class VLogKnowledgeBase { + + private final Map edbPredicates = new HashMap<>(); + private final Map aliasesForEdbPredicates = new HashMap<>(); + + private final Set aliasedEdbPredicates = new HashSet<>(); + + private final Set idbPredicates = new HashSet<>(); + + private final Map> directEdbFacts = new HashMap<>(); + + private final Set rules = new HashSet<>(); + + /** + * Package-protected constructor, that organizes given {@code knowledgeBase} in + * vLog-specific data structures. + * + * @param knowledgeBase + */ + VLogKnowledgeBase(final KnowledgeBase knowledgeBase) { + final LoadKbVisitor visitor = this.new LoadKbVisitor(); + visitor.clearIndexes(); + for (final Statement statement : knowledgeBase) { + statement.accept(visitor); + } + } + + boolean hasData() { + return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); + } + + public boolean hasRules() { + return !this.rules.isEmpty(); + } + + Predicate getAlias(final Predicate predicate) { + if (this.edbPredicates.containsKey(predicate)) { + return predicate; + } else { + return this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + } + + String getVLogDataSourcesConfigurationString() { + final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); + int dataSourceIndex = 0; + + for (final Entry e : this.edbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getValue().getDataSource(), e.getKey(), + dataSourceIndex, formatter); + } + + for (final Entry e : this.aliasesForEdbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getKey().getDataSource(), e.getValue(), + dataSourceIndex, formatter); + } + + formatter.close(); + return configStringBuilder.toString(); + } + + int addDataSourceConfigurationString(final DataSource dataSource, final Predicate predicate, + final int 
dataSourceIndex, final Formatter formatter) { + int newDataSourceIndex = dataSourceIndex; + + if (dataSource != null) { + if (dataSource instanceof VLogDataSource) { + final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; + final String configString = vLogDataSource.toConfigString(); + if (configString != null) { + formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); + newDataSourceIndex++; + } + } + } + + return newDataSourceIndex; + } + + Map getEdbPredicates() { + return this.edbPredicates; + } + + Map getAliasesForEdbPredicates() { + return this.aliasesForEdbPredicates; + } + + Map> getDirectEdbFacts() { + return this.directEdbFacts; + } + + Set getRules() { + return this.rules; + } + + /** + * + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. + * + * @author Markus Kroetzsch + */ + + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + VLogKnowledgeBase.this.edbPredicates.clear(); + VLogKnowledgeBase.this.idbPredicates.clear(); + VLogKnowledgeBase.this.aliasedEdbPredicates.clear(); + VLogKnowledgeBase.this.aliasesForEdbPredicates.clear(); + VLogKnowledgeBase.this.directEdbFacts.clear(); + VLogKnowledgeBase.this.rules.clear(); + } + + @Override + public Void visit(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!VLogKnowledgeBase.this.directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList<>(); + facts.add(fact); + VLogKnowledgeBase.this.directEdbFacts.put(predicate, facts); + } else { + VLogKnowledgeBase.this.directEdbFacts.get(predicate).add(fact); + } + return null; + } + + @Override + public Void visit(final Rule statement) { + VLogKnowledgeBase.this.rules.add(statement); + for (final PositiveLiteral positiveLiteral : statement.getHead()) { + 
final Predicate predicate = positiveLiteral.getPredicate(); + if (!VLogKnowledgeBase.this.idbPredicates.contains(predicate)) { + if (VLogKnowledgeBase.this.edbPredicates.containsKey(predicate)) { + addEdbAlias(VLogKnowledgeBase.this.edbPredicates.get(predicate)); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } + VLogKnowledgeBase.this.idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(final DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + } + + void registerEdbDeclaration(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + if (VLogKnowledgeBase.this.idbPredicates.contains(predicate) + || VLogKnowledgeBase.this.aliasedEdbPredicates.contains(predicate)) { + if (!VLogKnowledgeBase.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + final DataSourceDeclaration currentMainDeclaration = VLogKnowledgeBase.this.edbPredicates.get(predicate); + if (currentMainDeclaration == null) { + VLogKnowledgeBase.this.edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } // else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), + predicate.getArity()); + } + 
VLogKnowledgeBase.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + VLogKnowledgeBase.this.aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new UniversalVariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), + new ConjunctionImpl<>(Arrays.asList(body))); + VLogKnowledgeBase.this.rules.add(rule); + } + + } + + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return this.predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return this.predicate.equals(other.predicate); + } + } + +} \ No newline at end of file diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 260db93e2..9c9acec91 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -2,33 +2,18 @@ import java.io.IOException; import java.text.MessageFormat; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; import org.semanticweb.vlog4j.core.reasoner.Algorithm; import org.semanticweb.vlog4j.core.reasoner.Correctness; @@ -82,162 +67,9 @@ public class VLogReasoner implements Reasoner { private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - /** - * Dummy data source declaration for predicates for 
which we have explicit local - * facts in the input. - * - * @author Markus Kroetzsch - * - */ - class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { - - final Predicate predicate; - - public LocalFactsDataSourceDeclaration(Predicate predicate) { - this.predicate = predicate; - } - - @Override - public T accept(StatementVisitor statementVisitor) { - return statementVisitor.visit(this); - } - - @Override - public Predicate getPredicate() { - return this.predicate; - } - - @Override - public DataSource getDataSource() { - return null; - } - - @Override - public int hashCode() { - return predicate.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return predicate.equals(other.predicate); - } - - } - - /** - * Local visitor implementation for processing statements upon loading. Internal - * index structures are updated based on the statements that are detected. 
- * - * @author Markus Kroetzsch - * - */ - class LoadKbVisitor implements StatementVisitor { - - public void clearIndexes() { - edbPredicates.clear(); - idbPredicates.clear(); - aliasedEdbPredicates.clear(); - aliasesForEdbPredicates.clear(); - directEdbFacts.clear(); - rules.clear(); - } - - @Override - public Void visit(Fact statement) { - final Predicate predicate = statement.getPredicate(); - registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList(); - facts.add(statement); - directEdbFacts.put(predicate, facts); - } else { - directEdbFacts.get(predicate).add(statement); - } - return null; - } - - @Override - public Void visit(Rule statement) { - rules.add(statement); - for (final PositiveLiteral positiveLiteral : statement.getHead()) { - final Predicate predicate = positiveLiteral.getPredicate(); - if (!idbPredicates.contains(predicate)) { - if (edbPredicates.containsKey(predicate)) { - addEdbAlias(edbPredicates.get(predicate)); - edbPredicates.remove(predicate); - } - idbPredicates.add(predicate); - } - } - return null; - } - - @Override - public Void visit(DataSourceDeclaration statement) { - registerEdbDeclaration(statement); - return null; - } - - void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { - if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { - addEdbAlias(dataSourceDeclaration); - } - } else { - final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); - if (currentMainDeclaration == null) { - edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { - addEdbAlias(currentMainDeclaration); - addEdbAlias(dataSourceDeclaration); - edbPredicates.remove(predicate); - } // 
else: predicate already known to have local facts (only) - } - } - - void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - Predicate aliasPredicate; - if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { - aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); - } else { - aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), - predicate.getArity()); - } - aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - aliasedEdbPredicates.add(predicate); - - final List terms = new ArrayList<>(); - for (int i = 1; i <= predicate.getArity(); i++) { - terms.add(new UniversalVariableImpl("X" + i)); - } - final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); - final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), - new ConjunctionImpl(Arrays.asList(body))); - rules.add(rule); - } - - } - final KnowledgeBase knowledgeBase; final VLog vLog = new VLog(); - final Map aliasesForEdbPredicates = new HashMap<>(); - final Set idbPredicates = new HashSet<>(); - final Map edbPredicates = new HashMap<>(); - final Set aliasedEdbPredicates = new HashSet<>(); - final Map> directEdbFacts = new HashMap<>(); - final Set rules = new HashSet<>(); - private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; @@ -332,131 +164,108 @@ void load() throws IOException { void loadKnowledgeBase() throws IOException { LOGGER.info("Started loading knowledge base ..."); - final LoadKbVisitor visitor = new LoadKbVisitor(); - visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { - statement.accept(visitor); - } - if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { - LOGGER.warn("No facts have been 
provided."); - } + final VLogKnowledgeBase vLogKB = new VLogKnowledgeBase(this.knowledgeBase); - try { - this.vLog.start(getDataSourcesConfigurationString(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + if (!vLogKB.hasData()) { + LOGGER.warn("No data statements (facts or datasource declarations) have been provided."); } - loadInMemoryDataSources(); - validateDataSourcePredicateArities(); + // 1. vLog is initialized by loading VLog data sources + loadVLogDataSources(vLogKB); + + // 2. in-memory data is loaded + loadInMemoryDataSources(vLogKB); + + validateDataSourcePredicateArities(vLogKB); - loadFacts(); - loadRules(); + loadFacts(vLogKB); + + // 3. rules are loaded + loadRules(vLogKB); this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + this.correctness = !vLogKB.hasRules() ? 
Correctness.SOUND_AND_COMPLETE + : Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } - String getDataSourcesConfigurationString() { - final StringBuilder configStringBuilder = new StringBuilder(); - final Formatter formatter = new Formatter(configStringBuilder); - int dataSourceIndex = 0; - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), predicate, - dataSourceIndex, formatter); + void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { + try { + this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - dataSourceIndex = addDataSourceConfigurationString(dataSourceDeclaration.getDataSource(), aliasPredicate, - dataSourceIndex, formatter); + } + + void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { + vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); + + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); + } + + void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { + if (dataSource instanceof InMemoryDataSource) { + + final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; + try { + load(predicate, inMemoryDataSource); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", 
e); + } } - formatter.close(); - return configStringBuilder.toString(); - } - - int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, int dataSourceIndex, - Formatter formatter) { - if (dataSource != null) { - final String configString = dataSource.toConfigString(); - if (configString != null) { - formatter.format(dataSource.toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(predicate)); - return dataSourceIndex + 1; + } + + void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); } } - return dataSourceIndex; } /** * Checks if the loaded external data sources do in fact contain data of the * correct arity. 
- * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ - void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : edbPredicates.keySet()) { - validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); - } - for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { - validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), - dataSourceDeclaration.getDataSource()); - } - } + void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) + throws IncompatiblePredicateArityException { - void loadInMemoryDataSources() { - for (final Predicate predicate : this.edbPredicates.keySet()) { - final DataSourceDeclaration dataSourceDeclaration = this.edbPredicates.get(predicate); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), predicate); - } - for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { - final Predicate aliasPredicate = this.aliasesForEdbPredicates.get(dataSourceDeclaration); - loadInMemoryDataSource(dataSourceDeclaration.getDataSource(), aliasPredicate); - } - } + vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); - void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { - final InMemoryDataSource inMemoryDataSource; - if (dataSource instanceof InMemoryDataSource) { - inMemoryDataSource = (InMemoryDataSource) dataSource; - } else { - return; - } - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); - this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded 
direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } + vLogKB.getAliasesForEdbPredicates() + .forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); } /** * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predidate. - * - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used - * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * + * @param predicate + * the predicate for which data is loaded + * @param dataSource + * the data source used + * + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { - if (dataSource == null) + if (dataSource == null) { return; + } try { final int dataSourcePredicateArity = this.vLog .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); @@ -470,19 +279,16 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource } } - void loadFacts() { - for (final Predicate predicate : directEdbFacts.keySet()) { - Predicate aliasPredicate; - if (edbPredicates.containsKey(predicate)) { - aliasPredicate = predicate; - } else { - aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); - } + void loadFacts(final VLogKnowledgeBase vLogKB) { + final Map> directEdbFacts = vLogKB.getDirectEdbFacts(); + + directEdbFacts.forEach((k, v) -> { try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); - final String[][] vLogPredicateTuples = ModelToVLogConverter - .toVLogFactTuples(directEdbFacts.get(predicate)); + final String vLogPredicateName = 
ModelToVLogConverter.toVLogPredicate(vLogKB.getAlias(k)); + final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(v); + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); @@ -491,11 +297,12 @@ void loadFacts() { } catch (final EDBConfigurationException e) { throw new RuntimeException("Invalid data sources configuration!", e); } - } + + }); } - void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); + void loadRules(final VLogKnowledgeBase vLogKB) { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(vLogKB.getRules()); final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { @@ -707,7 +514,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { try { load(); - } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 throw new RuntimeException(e); } } From 27cf1fa34e8d1ad7fe83ecbccbbcce4671915b65 Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 13 Feb 2020 13:30:53 +0100 Subject: [PATCH 0735/1255] added some changes --- .../vlog4j/core/reasoner/Reasoner.java | 8 +++++--- .../reasoner/implementation/VLogReasoner.java | 19 +++++++++++++++---- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 62fb60c8a..d5effec06 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,12 +88,14 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the (explicit and implicit) facts of the knowledge base to an - * OutputStream. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase} Exports all + * the (explicit and implicit) facts of the knowledge base to an + * OutputStream. * * @param an OutputStream for the facts to be written to. */ - void writeInferences(OutputStream stream) throws IOException; + Correctness writeInferences(OutputStream stream) throws IOException; /** * Exports all the (explicit and implicit) facts of the knowledge base to a diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 0ed188ff0..8ac0944d0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -815,9 +815,11 @@ void setReasonerState(ReasonerState reasonerState) { } @Override - public void writeInferences(OutputStream stream) throws IOException { + public Correctness writeInferences(OutputStream stream) throws IOException { QueryResult queryAnswer; Set toBeQueriedHeadPredicates = new HashSet(); + TermQueryResultIterator stringQueryResultIterator; + QueryResultIterator answers; for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); @@ -835,8 +837,11 @@ public void writeInferences(OutputStream stream) throws IOException { for (int i = 0; i < predicate.getArity(); i++) { 
toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); } - try (final QueryResultIterator answers = this - .answerQuery(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables), true)) { + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter + .toVLogAtom(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables)); + try { + stringQueryResultIterator = this.vLog.query(vLogAtom, true, false); + answers = new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); while (answers.hasNext()) { queryAnswer = answers.next(); try { @@ -845,10 +850,16 @@ public void writeInferences(OutputStream stream) throws IOException { throw new RuntimeException(e); } } - + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + predicate + + " that does not occur in the knowledge base. Answer must be empty!"); } } + return this.correctness; + } @Override From e0f7b6079451d1d16dddbc7438d8e5723f8f56fd Mon Sep 17 00:00:00 2001 From: alloka Date: Thu, 13 Feb 2020 15:53:11 +0100 Subject: [PATCH 0736/1255] added some changes --- .../vlog4j/core/reasoner/Reasoner.java | 14 ++++++----- .../reasoner/implementation/VLogReasoner.java | 23 +++++++++++-------- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index d5effec06..9986ddac0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -88,22 +88,24 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * @return the correctness of the query answers, depending on the state of the - * reasoning (materialisation) and its {@link KnowledgeBase} 
Exports all - * the (explicit and implicit) facts of the knowledge base to an - * OutputStream. + * Exports all the (explicit and implicit) facts of the knowledge base to an + * OutputStream. * * @param an OutputStream for the facts to be written to. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. */ Correctness writeInferences(OutputStream stream) throws IOException; /** * Exports all the (explicit and implicit) facts of the knowledge base to a * desired file. - * + * * @param a String of the file path for the facts to be written to. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. */ - void writeInferences(String filePath) throws IOException; + Correctness writeInferences(String filePath) throws IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 8ac0944d0..664710240 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -22,7 +22,6 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; @@ -816,10 +815,11 @@ void setReasonerState(ReasonerState reasonerState) { @Override public Correctness writeInferences(OutputStream stream) 
throws IOException { - QueryResult queryAnswer; + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } Set toBeQueriedHeadPredicates = new HashSet(); - TermQueryResultIterator stringQueryResultIterator; - QueryResultIterator answers; for (Rule rule : this.knowledgeBase.getRules()) { for (Literal literal : rule.getHead()) { toBeQueriedHeadPredicates.add(literal.getPredicate()); @@ -840,12 +840,12 @@ public Correctness writeInferences(OutputStream stream) throws IOException { final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter .toVLogAtom(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables)); try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, false); - answers = new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false); while (answers.hasNext()) { - queryAnswer = answers.next(); + final karmaresearch.vlog.Term[] vlogTerms = answers.next(); + final List termList = VLogToModelConverter.toTermList(vlogTerms); try { - stream.write(Serializer.getFactString(predicate, queryAnswer.getTerms()).getBytes()); + stream.write(Serializer.getFactString(predicate, termList).getBytes()); } catch (IOException e) { throw new RuntimeException(e); } @@ -855,17 +855,22 @@ public Correctness writeInferences(OutputStream stream) throws IOException { } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + predicate + " that does not occur in the knowledge base. 
Answer must be empty!"); + throw new RuntimeException("Inconsistent knowledge base state.", e1); } } + logWarningOnCorrectness(); return this.correctness; } @Override - public void writeInferences(String filePath) throws IOException { + public Correctness writeInferences(String filePath) throws IOException { try (OutputStream stream = new FileOutputStream(filePath)) { writeInferences(stream); } + logWarningOnCorrectness(); + return this.correctness; } + } From 08def24bc48cedd70d737b9cff23901f69a4b5d7 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 17 Feb 2020 11:38:26 +0100 Subject: [PATCH 0737/1255] rework after review cleaner code --- .../vlog4j/core/reasoner/Reasoner.java | 20 +- .../reasoner/implementation/VLogReasoner.java | 270 +++++++++--------- 2 files changed, 153 insertions(+), 137 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 9986ddac0..dd4fd9b21 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.reasoner; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; @@ -88,24 +89,29 @@ static Reasoner getInstance() { KnowledgeBase getKnowledgeBase(); /** - * Exports all the (explicit and implicit) facts of the knowledge base to an - * OutputStream. + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to an OutputStream. * - * @param an OutputStream for the facts to be written to. + * @param an + * OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. 
+ * @throws IOException */ Correctness writeInferences(OutputStream stream) throws IOException; /** - * Exports all the (explicit and implicit) facts of the knowledge base to a - * desired file. + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to a desired file. * - * @param a String of the file path for the facts to be written to. + * @param a + * String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException + * @throws FileNotFoundException */ - Correctness writeInferences(String filePath) throws IOException; + Correctness writeInferences(String filePath) throws FileNotFoundException, IOException; /** * Sets the algorithm that will be used for reasoning over the knowledge base. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 664710240..a55f07e2e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,5 +1,6 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -118,19 +119,22 @@ public DataSource getDataSource() { @Override public int hashCode() { - return predicate.hashCode(); + return this.predicate.hashCode(); } @Override public boolean equals(Object obj) { - if (this == obj) + if (this == obj) { return true; - if (obj == null) + } + if (obj == null) { return false; - if (getClass() != obj.getClass()) + } + if (getClass() != obj.getClass()) { return false; + } final 
LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return predicate.equals(other.predicate); + return this.predicate.equals(other.predicate); } } @@ -145,39 +149,39 @@ public boolean equals(Object obj) { class LoadKbVisitor implements StatementVisitor { public void clearIndexes() { - edbPredicates.clear(); - idbPredicates.clear(); - aliasedEdbPredicates.clear(); - aliasesForEdbPredicates.clear(); - directEdbFacts.clear(); - rules.clear(); + VLogReasoner.this.edbPredicates.clear(); + VLogReasoner.this.idbPredicates.clear(); + VLogReasoner.this.aliasedEdbPredicates.clear(); + VLogReasoner.this.aliasesForEdbPredicates.clear(); + VLogReasoner.this.directEdbFacts.clear(); + VLogReasoner.this.rules.clear(); } @Override public Void visit(Fact statement) { final Predicate predicate = statement.getPredicate(); registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList(); + if (!VLogReasoner.this.directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList<>(); facts.add(statement); - directEdbFacts.put(predicate, facts); + VLogReasoner.this.directEdbFacts.put(predicate, facts); } else { - directEdbFacts.get(predicate).add(statement); + VLogReasoner.this.directEdbFacts.get(predicate).add(statement); } return null; } @Override public Void visit(Rule statement) { - rules.add(statement); + VLogReasoner.this.rules.add(statement); for (final PositiveLiteral positiveLiteral : statement.getHead()) { final Predicate predicate = positiveLiteral.getPredicate(); - if (!idbPredicates.contains(predicate)) { - if (edbPredicates.containsKey(predicate)) { - addEdbAlias(edbPredicates.get(predicate)); - edbPredicates.remove(predicate); + if (!VLogReasoner.this.idbPredicates.contains(predicate)) { + if (VLogReasoner.this.edbPredicates.containsKey(predicate)) { + addEdbAlias(VLogReasoner.this.edbPredicates.get(predicate)); + 
VLogReasoner.this.edbPredicates.remove(predicate); } - idbPredicates.add(predicate); + VLogReasoner.this.idbPredicates.add(predicate); } } return null; @@ -191,18 +195,19 @@ public Void visit(DataSourceDeclaration statement) { void registerEdbDeclaration(DataSourceDeclaration dataSourceDeclaration) { final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (idbPredicates.contains(predicate) || aliasedEdbPredicates.contains(predicate)) { - if (!aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + if (VLogReasoner.this.idbPredicates.contains(predicate) + || VLogReasoner.this.aliasedEdbPredicates.contains(predicate)) { + if (!VLogReasoner.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { addEdbAlias(dataSourceDeclaration); } } else { - final DataSourceDeclaration currentMainDeclaration = edbPredicates.get(predicate); + final DataSourceDeclaration currentMainDeclaration = VLogReasoner.this.edbPredicates.get(predicate); if (currentMainDeclaration == null) { - edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!(currentMainDeclaration.equals(dataSourceDeclaration))) { + VLogReasoner.this.edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { addEdbAlias(currentMainDeclaration); addEdbAlias(dataSourceDeclaration); - edbPredicates.remove(predicate); + VLogReasoner.this.edbPredicates.remove(predicate); } // else: predicate already known to have local facts (only) } } @@ -216,8 +221,8 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), predicate.getArity()); } - aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - aliasedEdbPredicates.add(predicate); + VLogReasoner.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + VLogReasoner.this.aliasedEdbPredicates.add(predicate); final List terms = new 
ArrayList<>(); for (int i = 1; i <= predicate.getArity(); i++) { @@ -225,9 +230,9 @@ void addEdbAlias(DataSourceDeclaration dataSourceDeclaration) { } final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl(Arrays.asList(head)), - new ConjunctionImpl(Arrays.asList(body))); - rules.add(rule); + final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), + new ConjunctionImpl<>(Arrays.asList(body))); + VLogReasoner.this.rules.add(rule); } } @@ -338,11 +343,11 @@ void loadKnowledgeBase() throws IOException { LOGGER.info("Started loading knowledge base ..."); final LoadKbVisitor visitor = new LoadKbVisitor(); visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { + for (final Statement statement : this.knowledgeBase) { statement.accept(visitor); } - if (edbPredicates.isEmpty() && aliasedEdbPredicates.isEmpty()) { + if (this.edbPredicates.isEmpty() && this.aliasedEdbPredicates.isEmpty()) { LOGGER.warn("No facts have been provided."); } @@ -363,7 +368,7 @@ void loadKnowledgeBase() throws IOException { this.reasonerState = ReasonerState.KB_LOADED; // if there are no rules, then materialisation state is complete - this.correctness = rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + this.correctness = this.rules.isEmpty() ? Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; LOGGER.info("Finished loading knowledge base."); } @@ -403,15 +408,15 @@ int addDataSourceConfigurationString(DataSource dataSource, Predicate predicate, * Checks if the loaded external data sources do in fact contain data of the * correct arity. 
* - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : edbPredicates.keySet()) { - validateDataSourcePredicateArity(predicate, edbPredicates.get(predicate).getDataSource()); + for (final Predicate predicate : this.edbPredicates.keySet()) { + validateDataSourcePredicateArity(predicate, this.edbPredicates.get(predicate).getDataSource()); } - for (final DataSourceDeclaration dataSourceDeclaration : aliasesForEdbPredicates.keySet()) { - validateDataSourcePredicateArity(aliasesForEdbPredicates.get(dataSourceDeclaration), + for (final DataSourceDeclaration dataSourceDeclaration : this.aliasesForEdbPredicates.keySet()) { + validateDataSourcePredicateArity(this.aliasesForEdbPredicates.get(dataSourceDeclaration), dataSourceDeclaration.getDataSource()); } } @@ -451,16 +456,19 @@ void loadInMemoryDataSource(DataSource dataSource, Predicate predicate) { * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predidate. 
* - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used + * @param predicate + * the predicate for which data is loaded + * @param dataSource + * the data source used * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) + * @throws IncompatiblePredicateArityException + * to indicate a problem (non-checked exception) */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { - if (dataSource == null) + if (dataSource == null) { return; + } try { final int dataSourcePredicateArity = this.vLog .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); @@ -475,17 +483,17 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource } void loadFacts() { - for (final Predicate predicate : directEdbFacts.keySet()) { + for (final Predicate predicate : this.directEdbFacts.keySet()) { Predicate aliasPredicate; - if (edbPredicates.containsKey(predicate)) { + if (this.edbPredicates.containsKey(predicate)) { aliasPredicate = predicate; } else { - aliasPredicate = aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + aliasPredicate = this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); } try { final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(aliasPredicate); final String[][] vLogPredicateTuples = ModelToVLogConverter - .toVLogFactTuples(directEdbFacts.get(predicate)); + .toVLogFactTuples(this.directEdbFacts.get(predicate)); this.vLog.addData(vLogPredicateName, vLogPredicateTuples); if (LOGGER.isDebugEnabled()) { for (final String[] tuple : vLogPredicateTuples) { @@ -499,7 +507,7 @@ void loadFacts() { } void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(rules); + final karmaresearch.vlog.Rule[] vLogRuleArray = 
ModelToVLogConverter.toVLogRuleArray(this.rules); final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); try { @@ -616,6 +624,8 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); throw new IllegalArgumentException(MessageFormat.format( "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); } @@ -624,6 +634,42 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St return this.correctness; } + @Override + public Correctness writeInferences(OutputStream stream) throws IOException { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Obtaining inferences is not alowed before reasoner is loaded!"); + } + final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); + + for (final Predicate predicate : toBeQueriedHeadPredicates) { + final PositiveLiteral queryAtom = getQueryAtom(predicate); + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); + try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { + while (answers.hasNext()) { + final karmaresearch.vlog.Term[] vlogTerms = answers.next(); + final List termList = VLogToModelConverter.toTermList(vlogTerms); + stream.write(Serializer.getFactString(predicate, termList).getBytes()); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + throw new RuntimeException("Inconsistent 
knowledge base state.", e1); + } + } + + logWarningOnCorrectness(); + return this.correctness; + } + + @Override + public Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + return writeInferences(stream); + } + } + private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); @@ -706,25 +752,6 @@ public boolean isMFC() { return checkCyclic.equals(CyclicCheckResult.CYCLIC); } - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - try { - load(); - } catch (IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 - throw new RuntimeException(e); - } - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); - } - @Override public CyclicityResult checkForCycles() { final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); @@ -771,6 +798,49 @@ public void onStatementsRemoved(List statementsRemoved) { updateCorrectnessOnStatementsRemoved(); } + Set getKnolwedgeBasePredicates() { + final Set toBeQueriedHeadPredicates = new HashSet<>(); + for (final Rule rule : this.knowledgeBase.getRules()) { + for (final Literal literal : rule.getHead()) { + toBeQueriedHeadPredicates.add(literal.getPredicate()); + } + } + for (final DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { + toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); + } + for (final Fact fact : this.knowledgeBase.getFacts()) { + 
toBeQueriedHeadPredicates.add(fact.getPredicate()); + } + return toBeQueriedHeadPredicates; + } + + private PositiveLiteral getQueryAtom(final Predicate predicate) { + final List toBeGroundedVariables = new ArrayList<>(predicate.getArity()); + for (int i = 0; i < predicate.getArity(); i++) { + toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); + } + return Expressions.makePositiveLiteral(predicate, toBeGroundedVariables); + } + + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + load(); + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); + } + private void updateReasonerToKnowledgeBaseChanged() { if (this.reasonerState.equals(ReasonerState.KB_LOADED) || this.reasonerState.equals(ReasonerState.MATERIALISED)) { @@ -813,64 +883,4 @@ void setReasonerState(ReasonerState reasonerState) { this.reasonerState = reasonerState; } - @Override - public Correctness writeInferences(OutputStream stream) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Set toBeQueriedHeadPredicates = new HashSet(); - for (Rule rule : this.knowledgeBase.getRules()) { - for (Literal literal : rule.getHead()) { - toBeQueriedHeadPredicates.add(literal.getPredicate()); - } - } - for (DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { - 
toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); - } - for (Fact fact : this.knowledgeBase.getFacts()) { - toBeQueriedHeadPredicates.add(fact.getPredicate()); - } - - for (Predicate predicate : toBeQueriedHeadPredicates) { - List toBeGroundedVariables = new ArrayList(); - for (int i = 0; i < predicate.getArity(); i++) { - toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); - } - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter - .toVLogAtom(Expressions.makePositiveLiteral(predicate, toBeGroundedVariables)); - try { - final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false); - while (answers.hasNext()) { - final karmaresearch.vlog.Term[] vlogTerms = answers.next(); - final List termList = VLogToModelConverter.toTermList(vlogTerms); - try { - stream.write(Serializer.getFactString(predicate, termList).getBytes()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + predicate - + " that does not occur in the knowledge base. 
Answer must be empty!"); - throw new RuntimeException("Inconsistent knowledge base state.", e1); - } - - } - logWarningOnCorrectness(); - return this.correctness; - - } - - @Override - public Correctness writeInferences(String filePath) throws IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - writeInferences(stream); - } - logWarningOnCorrectness(); - return this.correctness; - } - } From 90bee585ddef24c590599e7513e61c3b8999f552 Mon Sep 17 00:00:00 2001 From: alloka Date: Mon, 17 Feb 2020 16:37:13 +0100 Subject: [PATCH 0738/1255] moved unit test to reasoner --- .../VlogReasonerWriteInferencesTest.java | 101 ++++++++++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 42 -------- 2 files changed, 101 insertions(+), 42 deletions(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java new file mode 100644 index 000000000..73af579c0 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java @@ -0,0 +1,101 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import static org.junit.Assert.assertEquals; + +import java.io.BufferedReader; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import 
org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public class VlogReasonerWriteInferencesTest { + final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); + final Fact fact = Expressions.makeFact("http://example.org/s", c); + final AbstractConstant dresdenConst = Expressions.makeAbstractConstant("dresden"); + final Predicate locatedInPred = Expressions.makePredicate("LocatedIn", 2); + final Predicate addressPred = Expressions.makePredicate("address", 4); + final Predicate universityPred = Expressions.makePredicate("university", 2); + final UniversalVariable varX = Expressions.makeUniversalVariable("X"); + final UniversalVariable varY = Expressions.makeUniversalVariable("Y"); + final PositiveLiteral pl1 = Expressions.makePositiveLiteral(locatedInPred, varX, varY); + final PositiveLiteral pl2 = Expressions.makePositiveLiteral("location", varX, varY); + final PositiveLiteral pl3 = Expressions.makePositiveLiteral(addressPred, varX, + Expressions.makeExistentialVariable("Y"), Expressions.makeExistentialVariable("Z"), + Expressions.makeExistentialVariable("Q")); + final PositiveLiteral pl4 = Expressions.makePositiveLiteral(locatedInPred, Expressions.makeExistentialVariable("Q"), + Expressions.makeUniversalVariable("F")); + final PositiveLiteral pl5 = Expressions.makePositiveLiteral(universityPred, varX, + Expressions.makeUniversalVariable("F")); + final Conjunction conjunction = Expressions.makePositiveConjunction(pl3, pl4); + final Rule rule1 = Expressions.makeRule(pl1, pl2); + final Rule rule2 = Expressions.makeRule(conjunction, Expressions.makeConjunction(pl5)); + final Fact f1 = Expressions.makeFact(locatedInPred, Expressions.makeAbstractConstant("Egypt"), + Expressions.makeAbstractConstant("Africa")); + final Fact f2 = Expressions.makeFact(addressPred, Expressions.makeAbstractConstant("TSH"), + Expressions.makeAbstractConstant("Pragerstraße13"), Expressions.makeAbstractConstant("01069"), + dresdenConst); + final Fact f3 = Expressions.makeFact("city", dresdenConst); + final Fact 
f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); + final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), + Expressions.makeAbstractConstant("germany")); + final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + + @Test + public void testWriteInferences() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(fact); + kb.addStatements(rule1, rule2, f1, f2, f3, f4, f5); + locations.addTuple("dresden", "germany"); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); + List inferences = new ArrayList(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + reasoner.writeInferences(stream); + stream.flush(); + try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { + String factString = ""; + while ((factString = input.readLine()) != null) { + inferences.add(factString); + } + + } + assertEquals(10, inferences.size()); + } + + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 61353b99d..8a75123bd 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -23,13 +23,8 @@ import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import java.io.BufferedReader; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; @@ -44,12 +39,7 @@ import org.semanticweb.vlog4j.core.model.api.Statement; import 
org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -449,36 +439,4 @@ public void testCustomDatatype() throws ParsingException { assertEquals(constant, result); } - @Test - public void testWriteInferences() throws ParsingException, IOException { - KnowledgeBase kb = new KnowledgeBase(); - final InMemoryDataSource locations = new InMemoryDataSource(2, 1); - locations.addTuple("dresden", "germany"); - kb.addStatement(fact); - final String rules = "locatedIn(Egypt,Africa). \n" // - + "address(TSH, \"Pragerstraße 13\", \"01069\", dresden). \n" // - + "city(dresden). \n" // - + "country(germany). \n" // - + "university(tudresden, germany). \n" // - + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // - + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . 
\n"; - RuleParser.parseInto(kb, rules); - kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); - List inferences = new ArrayList(); - try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - reasoner.writeInferences(stream); - stream.flush(); - try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { - String factString = ""; - while ((factString = input.readLine()) != null) { - inferences.add(factString); - } - - } - assertEquals(10, inferences.size()); - } - - } } From 0b3f2303fe40ec183f3d313f8c41fb0755c67b0a Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 17 Feb 2020 17:43:53 +0100 Subject: [PATCH 0739/1255] license header VLogKnowledgeBase --- LICENSE.txt | 402 +++++++++--------- .../implementation/VLogKnowledgeBase.java | 22 +- 2 files changed, 222 insertions(+), 202 deletions(-) diff --git a/LICENSE.txt b/LICENSE.txt index 261eeb9e9..29f81d812 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index 73f4adf18..232ecafe9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -1,5 +1,25 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2020 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import java.util.ArrayList; import java.util.Arrays; import java.util.Formatter; @@ -279,4 +299,4 @@ public boolean equals(Object obj) { } } -} \ No newline at end of file +} From 43b19b211a4a4f93d51037099dfc0f26b7937319 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Tue, 18 Feb 2020 10:07:42 +0100 Subject: [PATCH 0740/1255] fixes #157 * Reasoner#exportQueryAnswersToCsv for an unknown (not in KB) predicate should not throw exception, but log a warning --- .../reasoner/implementation/VLogReasoner.java | 5 +-- .../implementation/VLogReasonerCsvOutput.java | 34 +++++++++---------- 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 482f36b90..989f61ff1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -4,7 +4,6 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; -import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -428,9 +427,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St throw new RuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - throw new IllegalArgumentException(MessageFormat.format( - "The query predicate does not occur in the loaded Knowledge Base: {0}!", query.getPredicate()), e1); + + " that does not occur in the knowledge base. 
Answers are therefore empty."); } logWarningOnCorrectness(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java index 85b136306..7f1c62838 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -21,9 +21,12 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.Arrays; import java.util.List; @@ -37,6 +40,8 @@ public class VLogReasonerCsvOutput { + private final static String nonExistingFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; + @Test public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOException { final String predicate = "p"; @@ -85,9 +90,8 @@ public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOExcep } - @Test(expected = IllegalArgumentException.class) + @Test public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); @@ -95,14 +99,13 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() thro try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, true); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } - 
@Test(expected = IllegalArgumentException.class) + @Test public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); @@ -110,15 +113,14 @@ public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() thro try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, false); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, false); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } - @Test(expected = IllegalArgumentException.class) + @Test public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() throws IOException { - final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); @@ -128,26 +130,24 @@ public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() throw reasoner.load(); reasoner.reason(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, true); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } - @Test(expected = IllegalArgumentException.class) public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"), Expressions.makeUniversalVariable("?y")); - final KnowledgeBase kb = new KnowledgeBase(); - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); 
reasoner.reason(); - final String emptyFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, false); + reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, false); } + assertFalse(Files.exists(Paths.get(nonExistingFilePath))); } } From 97d2b5935a1b351983bedc58715c77887bcb46b3 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Mon, 24 Feb 2020 15:21:30 +0100 Subject: [PATCH 0741/1255] fix #158 by creating new AbstractConstant --- .../core/model/implementation/Serializer.java | 19 ++++---- .../implementation/VLogToModelConverter.java | 43 ++++++++++------- .../VLogToModelConverterTest.java | 47 ++++++++++++------- 3 files changed, 65 insertions(+), 44 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 09b6f7981..a9f7006f4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -51,6 +51,7 @@ * */ public final class Serializer { + private static final String NEW_LINE = "\n"; public static final String STATEMENT_SEPARATOR = " ."; public static final String COMMA = ", "; public static final String NEGATIVE_IDENTIFIER = "~"; @@ -62,7 +63,7 @@ public final class Serializer { public static final String OPENING_BRACKET = "["; public static final String CLOSING_BRACKET = "]"; public static final String RULE_SEPARATOR = " :- "; - public static final String AT = "@"; + public static final char AT = '@'; public static final String DATA_SOURCE = "@source "; public static final String CSV_FILE_DATA_SOURCE = "load-csv"; public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; @@ -70,9 +71,9 @@ public final class Serializer { public static final String DATA_SOURCE_SEPARATOR = ": "; 
public static final String COLON = ":"; public static final String DOUBLE_CARET = "^^"; - public static final String LESS_THAN = "<"; - public static final String MORE_THAN = ">"; - public static final String QUOTE = "\""; + public static final char LESS_THAN = '<'; + public static final char MORE_THAN = '>'; + public static final char QUOTE = '"'; public static final String REGEX_DOUBLE = "^[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?$"; public static final String REGEX_INTEGER = "^[-+]?\\d+$"; @@ -367,7 +368,7 @@ public static String getString(final String string) { */ private static String escape(final String string) { return string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b") - .replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f"); + .replace(NEW_LINE, "\\n").replace("\r", "\\r").replace("\f", "\\f"); // don't touch single quotes here since we only construct double-quoted strings } @@ -380,15 +381,15 @@ private static String addAngleBrackets(final String string) { } public static String getFactString(Predicate predicate, List terms) { - return getString(predicate, terms) + STATEMENT_SEPARATOR + "\n"; + return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; } public static String getString(Predicate predicate, List terms) { - StringBuilder stringBuilder = new StringBuilder(""); - stringBuilder.append(getIRIString(predicate.getName())).append(OPENING_PARENTHESIS); + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName())); + stringBuilder.append(OPENING_PARENTHESIS); boolean first = true; - for (Term term : terms) { + for (final Term term : terms) { if (first) { first = false; } else { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java index 27fd1a2d5..3be4fdebf 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java @@ -26,10 +26,11 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** * Utility class with static methods for converting from VLog internal model @@ -63,8 +64,8 @@ static QueryResult toQueryResult(karmaresearch.vlog.Term[] vLogQueryResult) { * in given {@code vLogTerms} at the same position. */ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { - List terms = new ArrayList<>(vLogTerms.length); - for (karmaresearch.vlog.Term vLogTerm : vLogTerms) { + final List terms = new ArrayList<>(vLogTerms.length); + for (final karmaresearch.vlog.Term vLogTerm : vLogTerms) { terms.add(toTerm(vLogTerm)); } return terms; @@ -79,7 +80,7 @@ static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { * {@code vLogTerm} and of the corresponding type. 
*/ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { - String name = vLogTerm.getName(); + final String name = vLogTerm.getName(); switch (vLogTerm.getTermType()) { case CONSTANT: return toConstant(name); @@ -100,24 +101,32 @@ static Term toTerm(karmaresearch.vlog.Term vLogTerm) { * @return {@link Constant} object */ private static Constant toConstant(String vLogConstantName) { - if (vLogConstantName.charAt(0) == '<' && vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { + final Constant constant; + if (vLogConstantName.charAt(0) == Serializer.LESS_THAN + && vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { // strip <> off of IRIs - return new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); - } else if (vLogConstantName.charAt(0) == '"') { - if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') { - int startTypeIdx = vLogConstantName.lastIndexOf('<', vLogConstantName.length() - 2); - String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); - String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); - return new DatatypeConstantImpl(lexicalValue, datatype); + constant = new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1)); + } else if (vLogConstantName.charAt(0) == Serializer.QUOTE) { + if (vLogConstantName.charAt(vLogConstantName.length() - 1) == Serializer.MORE_THAN) { + final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.LESS_THAN, + vLogConstantName.length() - 2); + final String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1); + final String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3); + constant = new DatatypeConstantImpl(lexicalValue, datatype); } else { - int startTypeIdx = vLogConstantName.lastIndexOf('@', vLogConstantName.length() - 2); - String languageTag = vLogConstantName.substring(startTypeIdx + 1, 
vLogConstantName.length()); - String string = vLogConstantName.substring(1, startTypeIdx - 1); - return new LanguageStringConstantImpl(string, languageTag); + final int startTypeIdx = vLogConstantName.lastIndexOf(Serializer.AT, vLogConstantName.length() - 2); + if (startTypeIdx > -1) { + final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length()); + final String string = vLogConstantName.substring(1, startTypeIdx - 1); + constant = new LanguageStringConstantImpl(string, languageTag); + } else { + constant = new AbstractConstantImpl(vLogConstantName); + } } } else { - return new AbstractConstantImpl(vLogConstantName); + constant = new AbstractConstantImpl(vLogConstantName); } + return constant; } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java index 0a80eb198..64dd2469d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java @@ -19,10 +19,11 @@ * limitations under the License. 
* #L% */ - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; @@ -33,51 +34,61 @@ public class VLogToModelConverterTest { @Test public void testAbstractConstantConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); - Term vLog4jTerm = new AbstractConstantImpl("c"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); + final Term vLog4jTerm = new AbstractConstantImpl("c"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testAbstractConstantIriConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, ""); - Term vLog4jTerm = new AbstractConstantImpl("http://example.org/test"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final Term vLog4jTerm = new AbstractConstantImpl("http://example.org/test"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testDatatypeConstantConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"a\"^^"); - Term vLog4jTerm = new 
DatatypeConstantImpl("a", "http://example.org/test"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final Term vLog4jTerm = new DatatypeConstantImpl("a", "http://example.org/test"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testLanguageStringConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"Test\"@en"); - Term vLog4jTerm = new LanguageStringConstantImpl("Test", "en"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final Term vLog4jTerm = new LanguageStringConstantImpl("Test", "en"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test public void testNamedNullConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); - Term vLog4jTerm = new NamedNullImpl("_123"); - Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); + final Term vLog4jTerm = new NamedNullImpl("_123"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); assertEquals(vLog4jTerm, convertedTerm); } @Test(expected = IllegalArgumentException.class) public void testVariableConversion() { - karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "X"); VLogToModelConverter.toTerm(vLogTerm); } + @Test + public void testAbstractConstantContainingQuoteExpression() { + final String constName = "\""; + final Term convertedTerm = VLogToModelConverter + .toTerm(new 
karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, constName)); + assertTrue(convertedTerm.isConstant()); + assertTrue(convertedTerm instanceof AbstractConstant); + assertEquals(constName, convertedTerm.getName()); + } + } From 989dd9ba0cf9afee23078a61eaa82af77fd570cf Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Tue, 25 Feb 2020 10:48:53 +0100 Subject: [PATCH 0742/1255] add Correctness in queryAnswersSize --- .../core/reasoner/QueryAnswersSize.java | 64 ++++++ .../vlog4j/core/reasoner/Reasoner.java | 9 +- .../reasoner/implementation/VLogReasoner.java | 14 +- .../implementation/QueryAnswerSizeTest.java | 206 +++++++++--------- 4 files changed, 183 insertions(+), 110 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java new file mode 100644 index 000000000..465d9bff6 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java @@ -0,0 +1,64 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +/** + * + * @author Larry González + * + */ +public class QueryAnswersSize { + + final Correctness correctness; + final long size; + + public QueryAnswersSize(Correctness correctness, int size) { + this.correctness = correctness; + this.size = size; + } + + public QueryAnswersSize(Correctness correctness, long size) { + this.correctness = correctness; + this.size = size; + } + + /** + * Returns the correctness of the query result. + *
              + *
            • If {@link Correctness#SOUND_AND_COMPLETE}, the query results are + * guaranteed to be correct.
            • + *
            • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete.
            • + *
            • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. + *
            + * + * @return query result correctness + */ + public Correctness getCorrectness() { + return this.correctness; + } + + public long getSize() { + return this.size; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index cc5b62fe3..569ec414a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -337,14 +337,13 @@ static Reasoner getInstance() { * @return queryAnswerSize(query, true), the number of facts in the extension of * the query. */ - long queryAnswerSize(PositiveLiteral query); + QueryAnswersSize queryAnswerSize(PositiveLiteral query); // TODO add examples to query javadoc /** * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the number of explicit facts materialised by - * the reasoner. - *
            + * loaded into the reasoner and the number of explicit facts materialised by the + * reasoner.
            * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer @@ -390,7 +389,7 @@ static Reasoner getInstance() { * named individuals). * @return number of facts in the extension of the query. */ - long queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 383dbfe66..16c028194 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswersSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -595,19 +596,22 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public long queryAnswerSize(PositiveLiteral query) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query) { return queryAnswerSize(query, true); } @Override - public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new 
ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } Validate.notNull(query, "Query atom must not be null!"); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - long result = -1; + long result; try { result = this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { @@ -615,9 +619,9 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { } catch (NonExistingPredicateException e) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. Answer must be empty!"); - return 0; + result = 0; } - return result; + return new QueryAnswersSize(this.correctness, result); } @Override diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index b320f1e34..ec9639244 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -66,6 +66,8 @@ public class QueryAnswerSizeTest { private static final Fact factPc = Expressions.makeFact(predP, c); private static final Fact factPd = Expressions.makeFact(predP, d); + private static final Fact factQc = Expressions.makeFact(predQ, c); + private static final Fact factQd = Expressions.makeFact(predQ, d); private static final Fact factQe = Expressions.makeFact(predQ, e); private static final Fact factQf = Expressions.makeFact(predQ, f); @@ -79,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, 
reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -91,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -103,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -115,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -127,12 +129,16 @@ public void pFactsUniversalRule() throws IOException { 
kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); } } @@ -142,15 +148,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + 
assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -160,15 +166,15 @@ public void qFactsUniversalRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -178,15 +184,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - 
assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -196,15 +202,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, 
reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -214,25 +220,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, 
reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -242,25 +248,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(6, reasoner.queryAnswerSize(Qx)); - assertEquals(6, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, 
false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -270,17 +276,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Rxx, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxx, false)); + assertEquals(4, reasoner.queryAnswerSize(Rxx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxx, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(6, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Ryy, true)); - assertEquals(2, reasoner.queryAnswerSize(Ryy, false)); + assertEquals(4, reasoner.queryAnswerSize(Ryy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Ryy, false).getSize()); } } From d16d8afd92a1772399b35d99ff4f0de509084138 Mon Sep 17 
00:00:00 2001 From: Larry Gonzalez Date: Tue, 25 Feb 2020 10:48:53 +0100 Subject: [PATCH 0743/1255] add Correctness in queryAnswersSize; add test for facts --- .../core/reasoner/QueryAnswersSize.java | 64 ++++++ .../vlog4j/core/reasoner/Reasoner.java | 9 +- .../reasoner/implementation/VLogReasoner.java | 14 +- .../implementation/QueryAnswerSizeTest.java | 206 +++++++++--------- 4 files changed, 183 insertions(+), 110 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java new file mode 100644 index 000000000..465d9bff6 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java @@ -0,0 +1,64 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +/** + * + * @author Larry González + * + */ +public class QueryAnswersSize { + + final Correctness correctness; + final long size; + + public QueryAnswersSize(Correctness correctness, int size) { + this.correctness = correctness; + this.size = size; + } + + public QueryAnswersSize(Correctness correctness, long size) { + this.correctness = correctness; + this.size = size; + } + + /** + * Returns the correctness of the query result. + *
              + *
            • If {@link Correctness#SOUND_AND_COMPLETE}, the query results are + * guaranteed to be correct.
            • + *
            • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete.
            • + *
            • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. + *
            + * + * @return query result correctness + */ + public Correctness getCorrectness() { + return this.correctness; + } + + public long getSize() { + return this.size; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index cc5b62fe3..569ec414a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -337,14 +337,13 @@ static Reasoner getInstance() { * @return queryAnswerSize(query, true), the number of facts in the extension of * the query. */ - long queryAnswerSize(PositiveLiteral query); + QueryAnswersSize queryAnswerSize(PositiveLiteral query); // TODO add examples to query javadoc /** * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the number of explicit facts materialised by - * the reasoner. - *
            + * loaded into the reasoner and the number of explicit facts materialised by the + * reasoner.
            * An answer to the query is the terms a fact that matches the {@code query}: * the fact predicate is the same as the {@code query} predicate, the * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer @@ -390,7 +389,7 @@ static Reasoner getInstance() { * named individuals). * @return number of facts in the extension of the query. */ - long queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 383dbfe66..16c028194 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswersSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -595,19 +596,22 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public long queryAnswerSize(PositiveLiteral query) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query) { return queryAnswerSize(query, true); } @Override - public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new 
ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } Validate.notNull(query, "Query atom must not be null!"); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - long result = -1; + long result; try { result = this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { @@ -615,9 +619,9 @@ public long queryAnswerSize(PositiveLiteral query, boolean includeNulls) { } catch (NonExistingPredicateException e) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. Answer must be empty!"); - return 0; + result = 0; } - return result; + return new QueryAnswersSize(this.correctness, result); } @Override diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index b320f1e34..ec9639244 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -66,6 +66,8 @@ public class QueryAnswerSizeTest { private static final Fact factPc = Expressions.makeFact(predP, c); private static final Fact factPd = Expressions.makeFact(predP, d); + private static final Fact factQc = Expressions.makeFact(predQ, c); + private static final Fact factQd = Expressions.makeFact(predQ, d); private static final Fact factQe = Expressions.makeFact(predQ, e); private static final Fact factQf = Expressions.makeFact(predQ, f); @@ -79,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, 
reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -91,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -103,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -115,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); } } @@ -127,12 +129,16 @@ public void pFactsUniversalRule() throws IOException { 
kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); } } @@ -142,15 +148,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(0, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + 
assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -160,15 +166,15 @@ public void qFactsUniversalRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -178,15 +184,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px)); - assertEquals(0, reasoner.queryAnswerSize(Px, true)); - assertEquals(0, reasoner.queryAnswerSize(Px, false)); - 
assertEquals(2, reasoner.queryAnswerSize(Qx)); - assertEquals(2, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -196,15 +202,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxy)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(0, 
reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); } } @@ -214,25 +220,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(4, reasoner.queryAnswerSize(Qx)); - assertEquals(4, reasoner.queryAnswerSize(Qx, true)); - assertEquals(2, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, 
reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -242,25 +248,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px)); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); - assertEquals(6, reasoner.queryAnswerSize(Qx)); - assertEquals(6, reasoner.queryAnswerSize(Qx, true)); - assertEquals(4, reasoner.queryAnswerSize(Qx, false)); - assertEquals(2, reasoner.queryAnswerSize(Rxy)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false)); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true)); - assertEquals(0, reasoner.queryAnswerSize(Rey, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true)); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false)); - assertEquals(0, reasoner.queryAnswerSize(Rey, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false)); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false)); + assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx).getSize()); + assertEquals(6, reasoner.queryAnswerSize(Qx, true).getSize()); + assertEquals(4, reasoner.queryAnswerSize(Qx, 
false).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + + assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); + + assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); + assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); } } @@ -270,17 +276,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true)); - assertEquals(2, reasoner.queryAnswerSize(Px, false)); + assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Rxx, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxx, false)); + assertEquals(4, reasoner.queryAnswerSize(Rxx, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxx, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Rxy, true)); - assertEquals(2, reasoner.queryAnswerSize(Rxy, false)); + assertEquals(6, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Rxy, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Ryy, true)); - assertEquals(2, reasoner.queryAnswerSize(Ryy, false)); + assertEquals(4, reasoner.queryAnswerSize(Ryy, true).getSize()); + assertEquals(2, reasoner.queryAnswerSize(Ryy, false).getSize()); } } From 37cf305f139a5d15307a8aec1c75f033e6c16b8a Mon Sep 17 
00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 08:58:57 +0100 Subject: [PATCH 0744/1255] call reasoner.queryAnswerSize.getSize() instead of reasoner.queryAnswerSize --- .../vlog4j/client/picocli/VLog4jClientMaterialize.java | 2 +- .../semanticweb/vlog4j/examples/CompareWikidataDBpedia.java | 6 +++--- .../org/semanticweb/vlog4j/examples/CountingTriangles.java | 6 +++--- .../java/org/semanticweb/vlog4j/examples/DoidExample.java | 2 +- .../vlog4j/examples/InMemoryGraphAnalysisExample.java | 6 ++++-- .../examples/core/SkolemVsRestrictedChaseTermination.java | 2 +- .../semanticweb/vlog4j/examples/graal/DoidExampleGraal.java | 4 ++-- 7 files changed, 15 insertions(+), 13 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index 99b483243..b5ac9ff08 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -230,7 +230,7 @@ private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q } private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { - System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query)); + System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query).getSize()); } private String queryOputputPath(final PositiveLiteral query) { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index ce4bd92c7..cbf837284 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -88,9 +88,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")); - final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")); - final double dbpCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")); + final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")).getSize(); + final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); + final double dbpCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 63e1cb98e..79c66c1e6 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -61,9 +61,9 @@ public static void main(final String[] args) throws IOException, ParsingExceptio /* Initialise reasoner and compute inferences */ reasoner.reason(); - final double countries = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")); - final double shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + final double countries = 
reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")).getSize(); + final double shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")).getSize(); + final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")).getSize(); System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 96f573d87..c947d534e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,7 +72,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)); + double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)).getSize(); System.out.println(" " + queryString + ": " + querySize); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 87bc9927e..23d64cf1c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -87,8 +87,10 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new 
VLogReasoner(kb)) { reasoner.reason(); - final double unreachable = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")); + final double unreachable = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")) + .getSize(); + final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + .getSize(); System.out .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 138c58d4b..5be95aebf 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * restrictions. */ System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.queryAnswerSize(queryHasPart) + " results for hasPart(?X, ?Y)."); + + reasoner.queryAnswerSize(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); /* * 6. 
We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index e53b2845a..98bb8b7ac 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -131,10 +131,10 @@ public static void main(final String[] args) throws IOException { final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + reasoner.queryAnswerSize(humansWhoDiedOfCancer)); + + reasoner.queryAnswerSize(humansWhoDiedOfCancer).getSize()); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer)); + + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer).getSize()); System.out.println("Done."); } From 94178c76d8c210ec505b20723a8a5961e01d262e Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 09:24:43 +0100 Subject: [PATCH 0745/1255] call logWarningOnCorrectness in queryAnswerSize --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 1 + 1 file changed, 1 insertion(+) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 16c028194..22bb07ce7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -621,6 +621,7 @@ public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNu + " that does not occur in the 
knowledge base. Answer must be empty!"); result = 0; } + logWarningOnCorrectness(); return new QueryAnswersSize(this.correctness, result); } From 05bcbe552fc606d2f0557b1c4758595b10a17bf0 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 09:25:11 +0100 Subject: [PATCH 0746/1255] add and update javadoc --- .../core/reasoner/QueryAnswersSize.java | 41 +++++++++++++++++-- .../vlog4j/core/reasoner/Reasoner.java | 3 +- .../vlog4j/rdf/RdfModelConverter.java | 4 +- 3 files changed, 41 insertions(+), 7 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java index 465d9bff6..238145584 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java @@ -21,6 +21,31 @@ */ /** + * Container for correctness and size of a query. + * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + *
              + *
            • If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current + * knowledge base has completed, and the query answers are guaranteed to be + * correct.
            • + *
            • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete. This can happen + *
                + *
              • when materialisation has not completed ({@link Reasoner#reason()} returns + * {@code false}),
              • + *
              • or when the knowledge base was modified after reasoning, and the + * materialisation does not reflect the current knowledge base. + * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain + * complete query answers with respect to the current knowledge base.
              • + *
              + *
            • + *
            • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. This can happen when the knowledge base was modified + * and the reasoner materialisation is no longer consistent with the current + * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, + * in order to obtain correct query answers. + *
            * * @author Larry González * @@ -30,10 +55,14 @@ public class QueryAnswersSize { final Correctness correctness; final long size; - public QueryAnswersSize(Correctness correctness, int size) { - this.correctness = correctness; - this.size = size; - } + /** + * Constructor of QueryAnswerSize + * + * @param correctness of the evaluated query. See {@link Correctness}. + * + * @param size of the evaluated query, i.e. number of facts in the + * extension of the query. + */ public QueryAnswersSize(Correctness correctness, long size) { this.correctness = correctness; @@ -57,6 +86,10 @@ public Correctness getCorrectness() { return this.correctness; } + /** + * + * @return query result correctness + */ public long getSize() { return this.size; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 569ec414a..d57191648 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -387,7 +387,8 @@ static Reasoner getInstance() { * answers will only contain the {@link QueryResult}s with * terms of type {@link TermType#CONSTANT} (representing * named individuals). - * @return number of facts in the extension of the query. + * @return QueryAnswersSize that contains the Correctness and the number of + * facts in the extension of the query. */ QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java index d2f7f972b..0c58bb826 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java +++ b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java @@ -52,8 +52,8 @@ * String as name. *
          • {@link Literal}s are converted to {@link Constant}s with names containing * the canonical form of the literal label, the data type and the language.
          • - *
          • {@link BNode}s are converted to {@link NamedNull}s with the generated blank - * ID as name. {@link BNode}s have unique generated IDs in the context a + *
          • {@link BNode}s are converted to {@link NamedNull}s with the generated + * blank ID as name. {@link BNode}s have unique generated IDs in the context a * {@link Model}s. Blanks with the same name loaded from different models will * have different ids.
          • *
          From d18909f9d382b66a0eea1bc73d2365b783b60a09 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 09:28:42 +0100 Subject: [PATCH 0747/1255] add test: use a fact as a query --- .../core/reasoner/implementation/QueryAnswerSizeTest.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index ec9639244..818807ed1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -135,10 +135,15 @@ public void pFactsUniversalRule() throws IOException { assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPc, true).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factPd, true).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQc, true).getSize()); + assertEquals(1, reasoner.queryAnswerSize(factQd, true).getSize()); assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); + } } From 04629ada4589d74a638aa044d02f07cc97172658 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:19:17 +0100 Subject: [PATCH 0748/1255] add interface QueryAnswerSize and implementation QueryAnswerSizeImpl --- .../vlog4j/core/reasoner/QueryAnswerSize.java | 78 +++++++++++++++++++ .../QueryAnswerSizeImpl.java} | 0 2 files changed, 78 insertions(+) create mode 100644 
vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/{QueryAnswersSize.java => implementation/QueryAnswerSizeImpl.java} (100%) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java new file mode 100644 index 000000000..d4522d875 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java @@ -0,0 +1,78 @@ +package org.semanticweb.vlog4j.core.reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Container for correctness and number of query answers, i.e. the number of + * facts that the query maps to. + * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + *
            + *
          • If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current + * knowledge base has completed, and the query answers are guaranteed to be + * correct.
          • + *
          • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete. This can happen + *
              + *
            • when materialisation has not completed ({@link Reasoner#reason()} returns + * {@code false}),
            • + *
            • or when the knowledge base was modified after reasoning, and the + * materialisation does not reflect the current knowledge base. + * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain + * complete query answers with respect to the current knowledge base.
            • + *
            + *
          • + *
          • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. This can happen when the knowledge base was modified + * and the reasoner materialisation is no longer consistent with the current + * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, + * in order to obtain correct query answers. + *
          + * + * @author Larry González + * + */ +public interface QueryAnswerSize { + + /** + * Returns the correctness of the query result. + *
            + *
          • If {@link Correctness#SOUND_AND_COMPLETE}, the query results are + * guaranteed to be correct.
          • + *
          • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed + * to be sound, but may be incomplete.
          • + *
          • If {@link Correctness#INCORRECT}, the results may be incomplete, and some + * results may be unsound. + *
          + * + * @return query result correctness + */ + Correctness getCorrectness(); + + /** + * + * @return number of query answers, i.e., the number of facts that the query + * maps to. + */ + long getSize(); + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java similarity index 100% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswersSize.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java From fad6ab6b5a8ddc1f8d24157a328e76721abc046e Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:21:50 +0100 Subject: [PATCH 0749/1255] add interface QueryAnswerSize and implementation QueryAnswerSizeImpl --- .../implementation/QueryAnswerSizeImpl.java | 69 +++++-------------- 1 file changed, 16 insertions(+), 53 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java index 238145584..504446f92 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java @@ -1,4 +1,7 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import org.semanticweb.vlog4j.core.reasoner.Correctness; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; /*- * #%L @@ -20,78 +23,38 @@ * #L% */ -/** - * Container for correctness and size of a query. - * - * Depending on the state of the reasoning (materialisation) and its - * {@link KnowledgeBase}, the answers can have a different {@link Correctness} - *
            - *
          • If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current - * knowledge base has completed, and the query answers are guaranteed to be - * correct.
          • - *
          • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed - * to be sound, but may be incomplete. This can happen - *
              - *
            • when materialisation has not completed ({@link Reasoner#reason()} returns - * {@code false}),
            • - *
            • or when the knowledge base was modified after reasoning, and the - * materialisation does not reflect the current knowledge base. - * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain - * complete query answers with respect to the current knowledge base.
            • - *
            - *
          • - *
          • If {@link Correctness#INCORRECT}, the results may be incomplete, and some - * results may be unsound. This can happen when the knowledge base was modified - * and the reasoner materialisation is no longer consistent with the current - * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, - * in order to obtain correct query answers. - *
          - * - * @author Larry González - * - */ -public class QueryAnswersSize { +public class QueryAnswerSizeImpl implements QueryAnswerSize { - final Correctness correctness; - final long size; + final private Correctness correctness; + final private long size; /** * Constructor of QueryAnswerSize * * @param correctness of the evaluated query. See {@link Correctness}. * - * @param size of the evaluated query, i.e. number of facts in the + * @param size number of query answers, i.e. number of facts in the * extension of the query. */ - public QueryAnswersSize(Correctness correctness, long size) { + QueryAnswerSizeImpl(Correctness correctness, long size) { this.correctness = correctness; this.size = size; } - /** - * Returns the correctness of the query result. - *
            - *
          • If {@link Correctness#SOUND_AND_COMPLETE}, the query results are - * guaranteed to be correct.
          • - *
          • If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed - * to be sound, but may be incomplete.
          • - *
          • If {@link Correctness#INCORRECT}, the results may be incomplete, and some - * results may be unsound. - *
          - * - * @return query result correctness - */ + @Override public Correctness getCorrectness() { return this.correctness; } - /** - * - * @return query result correctness - */ + @Override public long getSize() { return this.size; } + @Override + public String toString() { + return this.size + " (" + this.correctness.toString() + ")"; + } + } From 6a57bce078687d63da853a2464e690dada32e8e8 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:22:56 +0100 Subject: [PATCH 0750/1255] fix javadoc; rename class --- .../vlog4j/core/reasoner/Reasoner.java | 17 ++++++++--------- .../reasoner/implementation/VLogReasoner.java | 8 ++++---- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index d57191648..df794ea28 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -11,6 +11,7 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; +import org.semanticweb.vlog4j.core.reasoner.implementation.QueryAnswerSizeImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; /* @@ -337,7 +338,7 @@ static Reasoner getInstance() { * @return queryAnswerSize(query, true), the number of facts in the extension of * the query. */ - QueryAnswersSize queryAnswerSize(PositiveLiteral query); + QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query); // TODO add examples to query javadoc /** @@ -380,17 +381,15 @@ static Reasoner getInstance() { * * @param query a {@link PositiveLiteral} representing the query to be * answered. 
- * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of - * type {@link TermType#NAMED_NULL} (representing anonymous - * individuals introduced to satisfy rule existentially - * quantified variables) will be included. Otherwise, the - * answers will only contain the {@link QueryResult}s with - * terms of type {@link TermType#CONSTANT} (representing - * named individuals). + * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} + * terms will be included in the {@link QueryAnswerSizeImpl}. + * Otherwise, facts with {@link TermType#NAMED_NULL} terms + * will be ignored. + * * @return QueryAnswersSize that contains the Correctness and the number of * facts in the extension of the query. */ - QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 22bb07ce7..c19c3caff 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -35,7 +35,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswersSize; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -596,12 +596,12 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } 
@Override - public QueryAnswersSize queryAnswerSize(PositiveLiteral query) { + public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query) { return queryAnswerSize(query, true); } @Override - public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); @@ -622,7 +622,7 @@ public QueryAnswersSize queryAnswerSize(PositiveLiteral query, boolean includeNu result = 0; } logWarningOnCorrectness(); - return new QueryAnswersSize(this.correctness, result); + return new QueryAnswerSizeImpl(this.correctness, result); } @Override From b7b6d91b84c9447b9c3c9d3df40c35c5cf7f4ac0 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:34:33 +0100 Subject: [PATCH 0751/1255] test with unreleased vlog --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 908b769d3..e363e83b3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,9 +31,9 @@ jobs: - dist: trusty -## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: -# - sh ./build-vlog-library.sh +# Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar + before_install: + - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From f222fa9ac56ea03cf98e67170bf39a8afafbaa6b Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Thu, 27 Feb 2020 14:55:54 +0100 Subject: [PATCH 0752/1255] go back with released version of vlog --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index e363e83b3..908b769d3 100644 --- a/.travis.yml +++ 
b/.travis.yml @@ -31,9 +31,9 @@ jobs: - dist: trusty -# Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar - before_install: - - sh ./build-vlog-library.sh +## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar +# before_install: +# - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 663dda4778ca3ff6fcd1bfe2eee57de9746e6c33 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 17:52:03 +0100 Subject: [PATCH 0753/1255] force travis to rebuild vLog --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 908b769d3..2c2ffc4af 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,7 +33,7 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar # before_install: -# - sh ./build-vlog-library.sh + - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 6a508757a6f97821789001e812c9503e76a3788e Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 17:58:13 +0100 Subject: [PATCH 0754/1255] fix travis.yml --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2c2ffc4af..7c38cbb34 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,8 +32,7 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: - - sh ./build-vlog-library.sh +before_install: - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 76cca028b4d6f935c40209df425a40f987e7308a Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 18:07:12 +0100 Subject: [PATCH 0755/1255] comment back VLog build code in travis script --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 
deletion(-) diff --git a/.travis.yml b/.travis.yml index 7c38cbb34..908b769d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,7 +32,8 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -before_install: - sh ./build-vlog-library.sh +# before_install: +# - sh ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 608657a54f811d6f2556cbd28c088868f341e351 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 18:08:37 +0100 Subject: [PATCH 0756/1255] try make travis build vLog --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 908b769d3..719168552 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,8 +32,8 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: -# - sh ./build-vlog-library.sh +before_install: + - sh: ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From 9025d0fe700c4679a58a4baafba59b1f9df40b1d Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Thu, 27 Feb 2020 18:16:51 +0100 Subject: [PATCH 0757/1255] try again to fix travis script so build-vlog-library.sh gets executed --- .travis.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 719168552..45354e497 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,8 +32,11 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar +# before_install: +# - sh ./build-vlog-library.sh before_install: - - sh: ./build-vlog-library.sh + script: + - ./build-vlog-library.sh install: mvn install $OPTIONS -DskipTests=true From da5ace2a74ec71d1b1b9f8231e57985e68607666 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 27 Feb 2020 21:30:05 +0100 Subject: [PATCH 0758/1255] Don't run 
build-vlog-library.sh under bash Travis messes with the .bashrc, so any new shell spawned with TRAVIS_HOME unset will fail the build. Explicitly use dash (if it exists), or sh otherwise. --- .travis.yml | 7 +++---- build-vlog-library.sh | 0 2 files changed, 3 insertions(+), 4 deletions(-) mode change 100644 => 100755 build-vlog-library.sh diff --git a/.travis.yml b/.travis.yml index 45354e497..38e6bc229 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,11 +32,10 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar -# before_install: -# - sh ./build-vlog-library.sh before_install: - script: - - ./build-vlog-library.sh + # explicitly avoid bash as travis screws with .bashrc, + # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 + - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" install: mvn install $OPTIONS -DskipTests=true diff --git a/build-vlog-library.sh b/build-vlog-library.sh old mode 100644 new mode 100755 From 8b2e250d1d0727b9e026eaeb534cc2c99a39ff22 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 27 Feb 2020 22:20:33 +0100 Subject: [PATCH 0759/1255] Get rid of some warnings in .travis.yml --- .travis.yml | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/.travis.yml b/.travis.yml index 38e6bc229..66f6b2c3e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,10 +1,10 @@ language: java -matrix: +os: linux +jobs: include: - os: linux dist: bionic - jdk: - - openjdk11 + jdk: openjdk11 after_success: - mvn clean test jacoco:report coveralls:report @@ -19,18 +19,13 @@ matrix: - g++-6 - libstdc++6 env: CC=gcc-6 CXX=g++-6 - jdk: - - openjdk8 + jdk: openjdk8 - os: osx osx_image: xcode10.2 - - -jobs: allow_failures: - dist: trusty - ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar before_install: # 
explicitly avoid bash as travis screws with .bashrc, @@ -39,8 +34,6 @@ before_install: install: mvn install $OPTIONS -DskipTests=true -sudo: false - cache: directories: - ./local_builds From 47237ef2829e219ec5acaace9a91a9399e687a65 Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 28 Feb 2020 11:52:48 +0100 Subject: [PATCH 0760/1255] rename queryAnswerSize to countQueryAnswers --- .../picocli/VLog4jClientMaterialize.java | 2 +- .../vlog4j/core/reasoner/Reasoner.java | 23 +- .../reasoner/implementation/VLogReasoner.java | 21 +- .../implementation/QueryAnswerSizeTest.java | 216 +++++++++--------- .../examples/CompareWikidataDBpedia.java | 7 +- .../vlog4j/examples/CountingTriangles.java | 9 +- .../vlog4j/examples/DoidExample.java | 2 +- .../InMemoryGraphAnalysisExample.java | 4 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/DoidExampleGraal.java | 4 +- 10 files changed, 144 insertions(+), 146 deletions(-) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index b5ac9ff08..ac71cd0d2 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -230,7 +230,7 @@ private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q } private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { - System.out.println("Number of query answers in " + query + ": " + reasoner.queryAnswerSize(query).getSize()); + System.out.println("Number of query answers in " + query + ": " + reasoner.countQueryAnswers(query).getSize()); } private String queryOputputPath(final PositiveLiteral query) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 445799b44..bae08ac94 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -93,8 +93,7 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. * - * @param an - * OutputStream for the facts to be written to. + * @param an OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException @@ -105,8 +104,7 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to a desired file. * - * @param a - * String of the file path for the facts to be written to. + * @param a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException @@ -359,13 +357,16 @@ static Reasoner getInstance() { */ QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); - /* + /** * @param query a {@link PositiveLiteral} representing the query to be answered. * * @return queryAnswerSize(query, true), the number of facts in the extension of - * the query. + * the query. */ - QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query); + + default QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query) { + return countQueryAnswers(query, true); + } // TODO add examples to query javadoc /** @@ -409,14 +410,14 @@ static Reasoner getInstance() { * @param query a {@link PositiveLiteral} representing the query to be * answered. 
* @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} - * terms will be included in the {@link QueryAnswerSizeImpl}. - * Otherwise, facts with {@link TermType#NAMED_NULL} terms - * will be ignored. + * terms will be included in the + * {@link QueryAnswerSizeImpl}. Otherwise, facts with + * {@link TermType#NAMED_NULL} terms will be ignored. * * @return QueryAnswersSize that contains the Correctness and the number of * facts in the extension of the query. */ - QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls); + QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b71ba53a0..5959a3e92 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -246,8 +246,8 @@ void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource * Checks if the loaded external data sources do in fact contain data of the * correct arity. * - * @throws IncompatiblePredicateArityException - * to indicate a problem (non-checked exception) + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) */ void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { @@ -260,13 +260,11 @@ void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws I * Checks if the loaded external data for a given source does in fact contain * data of the correct arity for the given predidate. 
* - * @param predicate - * the predicate for which data is loaded - * @param dataSource - * the data source used + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used * - * @throws IncompatiblePredicateArityException - * to indicate a problem (non-checked exception) + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) */ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) throws IncompatiblePredicateArityException { @@ -409,12 +407,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query) { - return queryAnswerSize(query, true); - } - - @Override - public QueryAnswerSizeImpl queryAnswerSize(PositiveLiteral query, boolean includeNulls) { + public QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index 818807ed1..7a2f1af57 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -81,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, 
true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -93,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -105,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -117,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); } } @@ -129,20 +129,20 @@ public void 
pFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPc, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPd, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQc, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQd, true).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPc, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factPd, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQc, false).getSize()); - assertEquals(1, reasoner.queryAnswerSize(factQd, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPc, true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPd, true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQc, true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQd, true).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPc, false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factPd, false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQc, 
false).getSize()); + assertEquals(1, reasoner.countQueryAnswers(factQd, false).getSize()); } } @@ -153,15 +153,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -171,15 +171,15 @@ public void qFactsUniversalRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, 
reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -189,15 +189,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + 
assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -207,15 +207,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); } } @@ -225,25 +225,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - 
assertEquals(4, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); + 
assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); } } @@ -253,25 +253,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Qx).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Qx, true).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Qx, false).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxy, false).getSize()); - - assertEquals(1, reasoner.queryAnswerSize(Rdy, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, true).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxe, true).getSize()); - - assertEquals(0, reasoner.queryAnswerSize(Rdy, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rey, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxd, false).getSize()); - assertEquals(0, reasoner.queryAnswerSize(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Qx).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Qx, true).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, 
false).getSize()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); } } @@ -281,17 +281,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.queryAnswerSize(Px, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Rxx, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxx, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Rxx, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxx, false).getSize()); - assertEquals(6, reasoner.queryAnswerSize(Rxy, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Rxy, false).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, false).getSize()); - assertEquals(4, reasoner.queryAnswerSize(Ryy, true).getSize()); - assertEquals(2, reasoner.queryAnswerSize(Ryy, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Ryy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Ryy, false).getSize()); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java 
b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index cbf837284..5ca7a7404 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -88,9 +88,10 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double resultCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("result(?X)")).getSize(); - final double wdCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); - final double dbpCount = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); + final double resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) + .getSize(); + final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); + final double dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 79c66c1e6..8f6799447 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -61,9 +61,12 @@ public static void main(final String[] args) throws IOException, ParsingExceptio /* Initialise reasoner and compute inferences */ reasoner.reason(); - final double countries = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("country(?X)")).getSize(); - final 
double shareBorder = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")).getSize(); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")).getSize(); + final double countries = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("country(?X)")) + .getSize(); + final double shareBorder = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")) + .getSize(); + final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + .getSize(); System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index c947d534e..1cf8f1646 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,7 +72,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral(queryString)).getSize(); + double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getSize(); System.out.println(" " + queryString + ": " + querySize); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 23d64cf1c..5726478fa 100644 --- 
a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -87,9 +87,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio try (final Reasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - final double unreachable = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("unreachable(?X)")) + final double unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) .getSize(); - final double triangles = reasoner.queryAnswerSize(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) + final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) .getSize(); System.out diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 5be95aebf..21b5ef928 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void main(final String[] args) throws IOException, ParsingExceptio * restrictions. */ System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.queryAnswerSize(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); + + reasoner.countQueryAnswers(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); /* * 6. 
We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 98bb8b7ac..4fde8b97e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -131,10 +131,10 @@ public static void main(final String[] args) throws IOException { final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + reasoner.queryAnswerSize(humansWhoDiedOfCancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfCancer).getSize()); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + reasoner.queryAnswerSize(humansWhoDiedOfNoncancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfNoncancer).getSize()); System.out.println("Done."); } From 1f8f76f2ea3a5eb7fa7bb0ddd1fccba9d4246e7c Mon Sep 17 00:00:00 2001 From: Larry Gonzalez Date: Fri, 28 Feb 2020 12:08:58 +0100 Subject: [PATCH 0761/1255] fix javadoc; fix implort; add default method --- .../semanticweb/vlog4j/core/reasoner/Reasoner.java | 11 ++++++----- .../core/reasoner/implementation/VLogReasoner.java | 3 ++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index bae08ac94..51e84ce71 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -13,7 +13,6 @@ import org.semanticweb.vlog4j.core.model.api.QueryResult; import 
org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryAnswerSizeImpl; import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; /* @@ -360,11 +359,13 @@ static Reasoner getInstance() { /** * @param query a {@link PositiveLiteral} representing the query to be answered. * - * @return queryAnswerSize(query, true), the number of facts in the extension of - * the query. + * @return countQueryAnswers(query, true), i.e., the number of facts in the + * extension of the query, including answers with NamedNull terms that + * have been introduced during reasoning. See also + * {@link Reasoner#countQueryAnswers(PositiveLiteral, boolean)} */ - default QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query) { + default QueryAnswerSize countQueryAnswers(PositiveLiteral query) { return countQueryAnswers(query, true); } @@ -417,7 +418,7 @@ default QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query) { * @return QueryAnswersSize that contains the Correctness and the number of * facts in the extension of the query. 
*/ - QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls); + QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 5959a3e92..9ffe011cf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -31,6 +31,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -407,7 +408,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public QueryAnswerSizeImpl countQueryAnswers(PositiveLiteral query, boolean includeNulls) { + public QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls) { validateNotClosed(); if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); From 2211f8a56b23a3c17a0bc3b1f6ff71d20e289d74 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:17:48 +0100 Subject: [PATCH 0762/1255] rename "size" to "count" in class and field names; fix javadoc --- .../picocli/VLog4jClientMaterialize.java | 2 +- ...yAnswerSize.java => QueryAnswerCount.java} | 4 +- .../vlog4j/core/reasoner/Reasoner.java | 40 ++-- ...izeImpl.java => QueryAnswerCountImpl.java} | 16 +- 
.../reasoner/implementation/VLogReasoner.java | 32 ++- .../implementation/QueryAnswerSizeTest.java | 216 +++++++++--------- .../examples/CompareWikidataDBpedia.java | 6 +- .../vlog4j/examples/CountingTriangles.java | 6 +- .../vlog4j/examples/DoidExample.java | 2 +- .../InMemoryGraphAnalysisExample.java | 4 +- .../SkolemVsRestrictedChaseTermination.java | 2 +- .../examples/graal/DoidExampleGraal.java | 4 +- 12 files changed, 168 insertions(+), 166 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/{QueryAnswerSize.java => QueryAnswerCount.java} (98%) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QueryAnswerSizeImpl.java => QueryAnswerCountImpl.java} (78%) diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java index ac71cd0d2..44969e879 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java @@ -230,7 +230,7 @@ private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral q } private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { - System.out.println("Number of query answers in " + query + ": " + reasoner.countQueryAnswers(query).getSize()); + System.out.println("Number of query answers in " + query + ": " + reasoner.countQueryAnswers(query).getCount()); } private String queryOputputPath(final PositiveLiteral query) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java rename to 
vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java index d4522d875..3438c5e34 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerSize.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java @@ -51,7 +51,7 @@ * @author Larry González * */ -public interface QueryAnswerSize { +public interface QueryAnswerCount { /** * Returns the correctness of the query result. @@ -73,6 +73,6 @@ public interface QueryAnswerSize { * @return number of query answers, i.e., the number of facts that the query * maps to. */ - long getSize(); + long getCount(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 51e84ce71..55985c1e2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -357,26 +357,32 @@ static Reasoner getInstance() { QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); /** + * * Evaluates an atomic ({@code query}), and counts the number of query answer + * implicit facts loaded into the reasoner and the number of query answer + * explicit facts materialised by the reasoner. + * * @param query a {@link PositiveLiteral} representing the query to be answered. * - * @return countQueryAnswers(query, true), i.e., the number of facts in the - * extension of the query, including answers with NamedNull terms that - * have been introduced during reasoning. See also - * {@link Reasoner#countQueryAnswers(PositiveLiteral, boolean)} + * @return a {@link QueryAnswerCount} object that contains the query answers + * {@link Correctness} and the number of query answers (i.e. 
the number + * of facts in the extension of the query), including answers with + * {@link NamedNull} terms that have been introduced during reasoning. + * See also + * {@link Reasoner#countQueryAnswers(PositiveLiteral, boolean)}. */ - default QueryAnswerSize countQueryAnswers(PositiveLiteral query) { - return countQueryAnswers(query, true); + default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { + return this.countQueryAnswers(query, true); } // TODO add examples to query javadoc /** - * Evaluates an atomic ({@code query}), and returns the number of implicit facts - * loaded into the reasoner and the number of explicit facts materialised by the - * reasoner.
          - * An answer to the query is the terms a fact that matches the {@code query}: - * the fact predicate is the same as the {@code query} predicate, the - * {@link TermType#CONSTANT} terms of the {@code query} appear in the answer + * Evaluates an atomic ({@code query}), and counts the number of query answer + * implicit facts loaded into the reasoner and the number of query answer + * explicit facts materialised by the reasoner.
          + * An answer to the query is the term set of a fact that matches the + * {@code query}: the fact predicate is the same as the {@code query} predicate, + * the {@link TermType#CONSTANT} terms of the {@code query} appear in the answer * fact at the same term position, and the {@link TermType#VARIABLE} terms of * the {@code query} are matched by terms in the fact, either named * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The @@ -411,14 +417,14 @@ default QueryAnswerSize countQueryAnswers(PositiveLiteral query) { * @param query a {@link PositiveLiteral} representing the query to be * answered. * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} - * terms will be included in the - * {@link QueryAnswerSizeImpl}. Otherwise, facts with + * terms will be counted. Otherwise, facts with * {@link TermType#NAMED_NULL} terms will be ignored. * - * @return QueryAnswersSize that contains the Correctness and the number of - * facts in the extension of the query. + * @return a {@link QueryAnswerCount} object that contains the query answers + * Correctness and the number query answers, i.e. the number of facts in + * the extension of the query. 
*/ - QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls); + QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls); // TODO add examples to query javadoc /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java similarity index 78% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java index 504446f92..68deeb6f0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeImpl.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -1,7 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; /*- * #%L @@ -23,10 +23,10 @@ * #L% */ -public class QueryAnswerSizeImpl implements QueryAnswerSize { +public class QueryAnswerCountImpl implements QueryAnswerCount { final private Correctness correctness; - final private long size; + final private long count; /** * Constructor of QueryAnswerSize @@ -37,9 +37,9 @@ public class QueryAnswerSizeImpl implements QueryAnswerSize { * extension of the query. 
*/ - QueryAnswerSizeImpl(Correctness correctness, long size) { + QueryAnswerCountImpl(Correctness correctness, long size) { this.correctness = correctness; - this.size = size; + this.count = size; } @Override @@ -48,13 +48,13 @@ public Correctness getCorrectness() { } @Override - public long getSize() { - return this.size; + public long getCount() { + return this.count; } @Override public String toString() { - return this.size + " (" + this.correctness.toString() + ")"; + return this.count + " (" + this.correctness.toString() + ")"; } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 9ffe011cf..e8db05863 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -31,7 +31,7 @@ import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerSize; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; import org.semanticweb.vlog4j.core.reasoner.Reasoner; import org.semanticweb.vlog4j.core.reasoner.ReasonerState; @@ -383,11 +383,7 @@ private void runChase() { @Override public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); + validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = 
ModelToVLogConverter.toVLogAtom(query); @@ -408,12 +404,8 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul } @Override - public QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeNulls) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); + public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { + validateBeforeQuerying(query); final boolean filterBlanks = !includeNulls; final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); @@ -429,17 +421,13 @@ public QueryAnswerSize countQueryAnswers(PositiveLiteral query, boolean includeN result = 0; } logWarningOnCorrectness(); - return new QueryAnswerSizeImpl(this.correctness, result); + return new QueryAnswerCountImpl(this.correctness, result); } @Override public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, final boolean includeBlanks) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); + validateBeforeQuerying(query); Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); @@ -458,6 +446,14 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St return this.correctness; } + private void validateBeforeQuerying(final PositiveLiteral query) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not 
alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + } + @Override public Correctness writeInferences(OutputStream stream) throws IOException { validateNotClosed(); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java index 7a2f1af57..10f1b450b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java @@ -81,9 +81,9 @@ public void noFactsnoRules() throws IOException { final KnowledgeBase kb = new KnowledgeBase(); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -93,9 +93,9 @@ public void noFactsUniversalRule() throws IOException { kb.addStatement(QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -105,9 +105,9 @@ public void noFactsExistentialRule() throws IOException { kb.addStatement(RxyQyPx); try (VLogReasoner reasoner = 
new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -117,9 +117,9 @@ public void pFactsNoRules() throws IOException { kb.addStatements(factPc, factPd); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); } } @@ -129,20 +129,20 @@ public void pFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factPc, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factPd, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQc, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQd, true).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factPc, false).getSize()); - 
assertEquals(1, reasoner.countQueryAnswers(factPd, false).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQc, false).getSize()); - assertEquals(1, reasoner.countQueryAnswers(factQd, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQc, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQd, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPc, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPd, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQc, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQd, false).getCount()); } } @@ -153,15 +153,15 @@ public void pFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + 
assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -171,15 +171,15 @@ public void qFactsUniversalRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -189,15 
+189,15 @@ public void qFactsExistentialRule() throws IOException { kb.addStatements(factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(0, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(0, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -207,15 +207,15 @@ public void pFactsQFactsUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(0, 
reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); } } @@ -225,25 +225,25 @@ public void pFactsQFactsExistentialRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); - - assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); - - assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, 
false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getCount()); } } @@ -253,25 +253,25 @@ public void pFactsQFactsExistentialAndUniversalRule() throws IOException { kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); - assertEquals(6, reasoner.countQueryAnswers(Qx).getSize()); - assertEquals(6, reasoner.countQueryAnswers(Qx, true).getSize()); - assertEquals(4, reasoner.countQueryAnswers(Qx, false).getSize()); - assertEquals(2, 
reasoner.countQueryAnswers(Rxy).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getSize()); - - assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getSize()); - - assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rey, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getSize()); - assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(6, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(6, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getCount()); } } @@ -281,17 +281,17 @@ public void pFactsLiteralWithSameVariables() throws IOException { 
kb.addStatements(factPc, factPd, RxxRxyRyyPx); try (VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.reason(); - assertEquals(2, reasoner.countQueryAnswers(Px, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Px, false).getSize()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); - assertEquals(4, reasoner.countQueryAnswers(Rxx, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxx, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Rxx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxx, false).getCount()); - assertEquals(6, reasoner.countQueryAnswers(Rxy, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Rxy, false).getSize()); + assertEquals(6, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, false).getCount()); - assertEquals(4, reasoner.countQueryAnswers(Ryy, true).getSize()); - assertEquals(2, reasoner.countQueryAnswers(Ryy, false).getSize()); + assertEquals(4, reasoner.countQueryAnswers(Ryy, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Ryy, false).getCount()); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java index 5ca7a7404..0e7d18b32 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java @@ -89,9 +89,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio reasoner.reason(); final double resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) - .getSize(); - final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getSize(); - final double 
dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getSize(); + .getCount(); + final double wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getCount(); + final double dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getCount(); System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + " were in Wikidata and " + dbpCount + " were in DBPedia"); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java index 8f6799447..a97e5438e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java @@ -62,11 +62,11 @@ public static void main(final String[] args) throws IOException, ParsingExceptio reasoner.reason(); final double countries = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("country(?X)")) - .getSize(); + .getCount(); final double shareBorder = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)")) - .getSize(); + .getCount(); final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) - .getSize(); + .getCount(); System.out.print("Found " + countries + " countries in Wikidata"); // Due to symmetry, each joint border is found twice, hence we divide by 2: diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index 1cf8f1646..c88a900b9 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,7 +72,7 @@ public static void main(final String[] args) throws 
IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getSize(); + double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount(); System.out.println(" " + queryString + ": " + querySize); } } diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java index 5726478fa..3a1702e91 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java @@ -88,9 +88,9 @@ public static void main(final String[] args) throws ParsingException, IOExceptio reasoner.reason(); final double unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) - .getSize(); + .getCount(); final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)")) - .getSize(); + .getCount(); System.out .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java index 21b5ef928..f9b46ff59 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java @@ -102,7 +102,7 @@ public static void 
main(final String[] args) throws IOException, ParsingExceptio * restrictions. */ System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.countQueryAnswers(queryHasPart).getSize() + " results for hasPart(?X, ?Y)."); + + reasoner.countQueryAnswers(queryHasPart).getCount() + " results for hasPart(?X, ?Y)."); /* * 6. We reset the reasoner to discard all inferences, and apply the Restricted diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java index 4fde8b97e..ecb80b742 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java @@ -131,10 +131,10 @@ public static void main(final String[] args) throws IOException { final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); System.out.println("Humans in Wikidata who died in 2018 due to cancer: " - + reasoner.countQueryAnswers(humansWhoDiedOfCancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfCancer).getCount()); System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " - + reasoner.countQueryAnswers(humansWhoDiedOfNoncancer).getSize()); + + reasoner.countQueryAnswers(humansWhoDiedOfNoncancer).getCount()); System.out.println("Done."); } From 667a787ac34f0e51a471e4c6325bb73eee1a66db Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:27:44 +0100 Subject: [PATCH 0763/1255] renamed test --- .../{QueryAnswerSizeTest.java => QueryAnswerCountTest.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/{QueryAnswerSizeTest.java => QueryAnswerCountTest.java} (99%) diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java similarity index 99% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java index 10f1b450b..1a7dbd9aa 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerSizeTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java @@ -36,7 +36,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class QueryAnswerSizeTest { +public class QueryAnswerCountTest { private static final Predicate predP = Expressions.makePredicate("P", 1); private static final Predicate predQ = Expressions.makePredicate("Q", 1); From bfd60aadb09997719c58801a108e5da91c134fa2 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:31:26 +0100 Subject: [PATCH 0764/1255] added more test cases for queries without variables --- .../implementation/QueryAnswerCountTest.java | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java index 1a7dbd9aa..4e17d8bdf 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java @@ -65,6 +65,7 @@ public class QueryAnswerCountTest { private static final Fact factPc = Expressions.makeFact(predP, c); private 
static final Fact factPd = Expressions.makeFact(predP, d); + private static final Fact factPe = Expressions.makeFact(predP, e); private static final Fact factQc = Expressions.makeFact(predQ, c); private static final Fact factQd = Expressions.makeFact(predQ, d); @@ -84,6 +85,9 @@ public void noFactsnoRules() throws IOException { assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); } } @@ -96,6 +100,9 @@ public void noFactsUniversalRule() throws IOException { assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); } } @@ -108,6 +115,9 @@ public void noFactsExistentialRule() throws IOException { assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); } } @@ -120,6 +130,12 @@ public void pFactsNoRules() throws IOException { assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); assertEquals(0, reasoner.countQueryAnswers(Rxy, 
true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPe, true).getCount()); + } } From 556754f1afb398220e5cdc3272d59ca9cfe607b5 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:33:04 +0100 Subject: [PATCH 0765/1255] renamed querySize variable to answersCount --- .../java/org/semanticweb/vlog4j/examples/DoidExample.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java index c88a900b9..e919be73c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java @@ -72,8 +72,8 @@ public static void main(final String[] args) throws IOException, ParsingExceptio final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); System.out.println("\nNumber of inferred tuples for selected query atoms:"); for (final String queryString : queries) { - double querySize = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount(); - System.out.println(" " + queryString + ": " + querySize); + double answersCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount(); + System.out.println(" " + queryString + ": " + answersCount); } } } From 348ca05a5e4fa46eeabf72532dc1d7da22412cc4 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 17:44:26 +0100 Subject: [PATCH 0766/1255] comment lines that build vLog jar in travis.yml --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 
66f6b2c3e..27c9754d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,9 +28,9 @@ jobs: ## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar before_install: - # explicitly avoid bash as travis screws with .bashrc, - # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 - - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" +# # explicitly avoid bash as travis screws with .bashrc, +# # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 +# - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" install: mvn install $OPTIONS -DskipTests=true From 3ef15b78203ee64e6ed427d71bb76a201fee5a20 Mon Sep 17 00:00:00 2001 From: Irina Dragoste Date: Fri, 28 Feb 2020 18:40:02 +0100 Subject: [PATCH 0767/1255] uncomment travis.yml line that builds VLog jar --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 27c9754d3..cb87765d8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -30,7 +30,7 @@ jobs: before_install: # # explicitly avoid bash as travis screws with .bashrc, # # cf. 
https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12 -# - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" + - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh" install: mvn install $OPTIONS -DskipTests=true From 8625f1574dee0be71dd46c69f7b73981586cce4c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 11:17:26 +0100 Subject: [PATCH 0768/1255] bump vlog version to require snapshot --- vlog4j-core/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml index 502f9053c..3c51c676d 100644 --- a/vlog4j-core/pom.xml +++ b/vlog4j-core/pom.xml @@ -17,7 +17,7 @@ Core components of VLog4j: reasoner and model - 1.3.2 + 1.3.3-snapshot From 9a522240b3a7529879826452e252d95db64ed544 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 11:20:25 +0100 Subject: [PATCH 0769/1255] Mention counting improvement --- RELEASE-NOTES.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index d3c2ed11d..5a1e07412 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,6 +1,18 @@ VLog4j Release Notes ==================== +VLog4j v0.6.0 +------------- + +Breaking changes: +* In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no + longer exist. 
It can be replaced by + `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` + +New features: +* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` + + VLog4j v0.5.0 ------------- From 6615608ade53900bd952a4ff567aadf2863f2a14 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 26 Nov 2019 02:17:06 +0100 Subject: [PATCH 0770/1255] Parser: Add support for configurable custom literal syntax --- .../parser/ConfigurableLiteralHandler.java | 44 +++++ .../vlog4j/parser/ParserConfiguration.java | 51 ++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 60 ++++++- .../parser/javacc/JavaCCParserBase.java | 48 +++++- .../RuleParserConfigurableLiteralTest.java | 154 ++++++++++++++++++ 5 files changed, 343 insertions(+), 14 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java new file mode 100644 index 000000000..a98bfec68 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java @@ -0,0 +1,44 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing a configurable literal expression. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface ConfigurableLiteralHandler { + /** + * Parse a Data Source Declaration. + * + * @param syntacticForm syntactic form of the literal expression. + * @param subParserFactory a factory for obtaining a SubParser, sharing the + * parser's state, but bound to new input. + * + * @throws ParsingException when the given syntactic form is invalid. + * @return an appropriate @{link Constant} instance. + */ + public Constant parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 770c7fd16..3e89c30ea 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -47,6 +48,11 @@ public class ParserConfiguration { */ private final HashMap datatypes = new HashMap<>(); + /** + * The registered configurable literals. + */ + private HashMap literals = new HashMap<>(); + /** * Register a new (type of) Data Source. 
* @@ -136,6 +142,41 @@ private Constant parseDatatypeConstant(final String lexicalForm, final String da return Expressions.makeDatatypeConstant(lexicalForm, type); } + /** + * Check if a handler for this + * {@link org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter} + * is registered + * + * @param delimiter delimiter to check. + * @return true if a handler for the given delimiter is registered. + */ + public boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) { + return literals.containsKey(delimiter); + } + + /** + * Parse a configurable literal. + * + * @param delimiter delimiter given for the syntactic form. + * @param syntacticForm syntantic form of the literal to parse. + * @param subParserFactory a {@link SubParserFactory} instance that creates + * parser with the same context as the current parser. + * + * @throws ParsingException when no handler for the literal is registered, or + * the given syntactic form is invalid. + * @return an appropriate {@link Constant} instance. + */ + public Constant parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, + final SubParserFactory subParserFactory) throws ParsingException { + if (!isConfigurableLiteralRegistered(delimiter)) { + throw new ParsingException( + "No handler for configurable literal delimiter \"" + delimiter + "\" registered."); + } + + ConfigurableLiteralHandler handler = literals.get(delimiter); + return handler.parseLiteral(syntacticForm, subParserFactory); + } + /** * Register a new data type. 
* @@ -155,4 +196,14 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon this.datatypes.put(name, handler); return this; } + + public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimiter, + ConfigurableLiteralHandler handler) throws IllegalArgumentException { + if (literals.containsKey(delimiter)) { + throw new IllegalArgumentException("Literal delimiter \"" + delimiter + "\" is already registered."); + } + + this.literals.put(delimiter, handler); + return this; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 2a127ba9b..c84af1322 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -285,6 +285,11 @@ Term term(FormulaContext context) throws PrefixDeclarationException: return Expressions.makeExistentialVariable(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } +| try { + c = ConfigurableLiteral () { return c; } + } catch (ParsingException e) { + throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); + } } /** [16] */ @@ -309,6 +314,42 @@ Constant RDFLiteral() throws PrefixDeclarationException: { return createConstant(lex, lang, dt); } } +Constant ConfigurableLiteral() throws ParsingException: +{ + Token t; +} +{ + ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) + t = < PIPE_DELIMINATED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, + stripDelimiters(t.image, 1), + getSubParserFactory()); + } + | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) + t = < 
HASH_DELIMINATED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, + stripDelimiters(t.image, 1), + getSubParserFactory()); + } + // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, + // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) + // t = < BRACKET_DELIMINATED_LITERAL > { + // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, + // stripDelimiters(t.image, 1), + // getSubParserFactory()); + // } + | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) + t = < BRACE_DELIMINATED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, + stripDelimiters(t.image, 1), + getSubParserFactory()); + } + ) +} + String Langtag() : { Token t; @@ -330,11 +371,10 @@ String String(): String lex; } { - ( - t = < STRING_LITERAL1 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL2 > { lex = stripQuotes(t.image); } - | t = < STRING_LITERAL_LONG1 > { lex = stripQuotes3(t.image); } - | t = < STRING_LITERAL_LONG2 > { lex = stripQuotes3(t.image); } + ( t = < STRING_LITERAL1 > { lex = stripDelimiters(t.image, 1); } + | t = < STRING_LITERAL2 > { lex = stripDelimiters(t.image, 1); } + | t = < STRING_LITERAL_LONG1 > { lex = stripDelimiters(t.image, 3); } + | t = < STRING_LITERAL_LONG2 > { lex = stripDelimiters(t.image, 3); } ) { lex = unescapeStr(lex, t.beginLine, t.beginColumn); @@ -394,7 +434,7 @@ String IRIREF() : t = < IRI > { // we remove '<' and '>' - return t.image.substring(1,t.image.length()-1); + return stripDelimiters(t.image, 1); } } @@ -583,3 +623,11 @@ TOKEN : < PN_CHARS > )? 
> } + +TOKEN : +{ + < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT +| < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT +// | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT +| < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 63173e270..f55801423 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -47,6 +47,7 @@ * * @author Markus Kroetzsch * @author Larry Gonzalez + * @author Maximilian Marx * @author Jena developers, Apache Software Foundation (ASF) * */ @@ -88,6 +89,35 @@ public enum FormulaContext { BODY } + /** + * Defines delimiters for configurable literals. + * + * Since the parser is generated from a fixed grammar, we need to provide + * productions for these literals, even if they are not part of the syntax. With + * the {@link DefaultParserConfiguration}, any occurence of these literals will + * result in a {@link ParseException}. + * + * @author Maximilian Marx + */ + public enum ConfigurableLiteralDelimiter { + /** + * Literals of the form {@code |…|} + */ + PIPE, + /** + * Literals of the form {@code #…#} + */ + HASH, + /** + * Literals of the form {@code […]} + */ + BRACKET, + /** + * Literals of the form {@code {…}} + */ + BRACE, + } + public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); this.prefixDeclarations = new LocalPrefixDeclarations(); @@ -211,14 +241,16 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i return sb.toString(); } - /** Remove first and last characters (e.g. 
' or "") from a string */ - static String stripQuotes(String s) { - return s.substring(1, s.length() - 1); - } - - /** Remove first 3 and last 3 characters (e.g. ''' or """) from a string */ - static String stripQuotes3(String s) { - return s.substring(3, s.length() - 3); + /** + * Remove the first and last {@code n} characters from string {@code s} + * + * @param s string to strip delimiters from + * @param n number of characters to strip from both ends + * + * @return the stripped string. + */ + static String stripDelimiters(String s, int n) { + return s.substring(n, s.length() - n); } /** remove the first n charcacters from the string */ diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java new file mode 100644 index 000000000..ff6de9d2e --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -0,0 +1,154 @@ +package org.semanticweb.vlog4j.syntax.parser; + +/*- + * #%L + * VLog4j Parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.junit.Ignore; +import org.mockito.ArgumentMatchers; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.parser.ConfigurableLiteralHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +public class RuleParserConfigurableLiteralTest { + public static final Constant pipeConstant = Expressions.makeAbstractConstant("testPipe"); + public static final Constant hashConstant = Expressions.makeAbstractConstant("testHash"); + public static final Constant bracketConstant = Expressions.makeAbstractConstant("testBracket"); + public static final Constant braceConstant = Expressions.makeAbstractConstant("testBrace"); + + public static final ConfigurableLiteralHandler pipeHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.PIPE, pipeConstant); + public static final ConfigurableLiteralHandler hashHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.HASH, hashConstant); + public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACKET, bracketConstant); + public static final ConfigurableLiteralHandler braceHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACE, braceConstant); + + @Test(expected = ParsingException.class) + public void testNoDefaultPipeLiteral() throws ParsingException { + 
RuleParser.parseLiteral("p(|test|)"); + } + + @Test + public void testCustomLiteralRegistration() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); + assertTrue("Configurable Literal Handler has been registered", + parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE)); + } + + @Test(expected = IllegalArgumentException.class) + public void testNoDuplicateCustomLiteralRegistration() throws ParsingException, IllegalArgumentException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PIPE, hashHandler); + } + + @Test + public void testCustomPipeLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); + Literal result = RuleParser.parseLiteral("p(|test|)", parserConfiguration); + assertEquals(pipeConstant, result.getConstants().toArray()[0]); + } + + @Test + public void testCustomHashLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler); + Literal result = RuleParser.parseLiteral("p(#test#)", parserConfiguration); + assertEquals(hashConstant, result.getConstants().toArray()[0]); + } + + @Test + @Ignore + public void testCustomBracketLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); + assertEquals(bracketConstant, result.getConstants().toArray()[0]); + } + + 
@Test + public void testCustomBraceLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); + Literal result = RuleParser.parseLiteral("p({test})", parserConfiguration); + assertEquals(braceConstant, result.getConstants().toArray()[0]); + } + + @Test + @Ignore + public void testMixedCustomLiterals() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); + Literal result = RuleParser.parseLiteral("p(||, #test#, [[], {})", parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(pipeConstant, hashConstant, bracketConstant, braceConstant)); + assertEquals(expected, constants); + } + + @Test + public void testNonTrivialCustomPipeLiteral() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p(|" + label + "|)"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); + } + + static Constant makeReversedConstant(String name) { + StringBuilder builder = new StringBuilder(name); + return Expressions.makeAbstractConstant(builder.reverse().toString()); + } + + static ConfigurableLiteralHandler getMockLiteralHandler(ConfigurableLiteralDelimiter 
delimiter, Constant constant) { + ConfigurableLiteralHandler handler = mock(ConfigurableLiteralHandler.class); + try { + doReturn(constant).when(handler).parseLiteral(ArgumentMatchers.anyString(), + ArgumentMatchers.any()); + } catch (ParsingException e) { + // ignore it, since the mock will not throw + } + return handler; + } + +} From 2bc503b755eb790cf58f2a028ec845d3eacc0927 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 23 Jan 2020 17:52:25 +0100 Subject: [PATCH 0771/1255] Parser: Fix typo --- .../src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 431a01d0f..6b7a5eec2 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -182,7 +182,7 @@ public static Literal parseLiteral(final String input) throws ParsingException { public static PositiveLiteral parsePositiveLiteral(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { - return parseSyntaxFragment(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positivel literal", + return parseSyntaxFragment(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positive literal", parserConfiguration); } From 1617836bb89bf290666ae579a665672e01831eb4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 24 Jan 2020 18:23:09 +0100 Subject: [PATCH 0772/1255] Parser: Make grammar stateful --- ...eryResultDataSourceDeclarationHandler.java | 12 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 503 +++++++++--------- 2 files changed, 246 insertions(+), 269 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 3524fcb0f..bfed050f4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -47,11 +47,13 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto String endpoint = arguments.get(0); JavaCCParser parser = subParserFactory.makeSubParser(endpoint); String parsedEndpoint; - try { - parsedEndpoint = parser.IRI(false); - } catch (ParseException | PrefixDeclarationException e) { - throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); - } + // try { + /// @TODO: actually make sure that this is a valid IRI + parsedEndpoint = endpoint.substring(1, endpoint.length() - 1); + //parsedEndpoint = parser.quotedIri(); + // } catch (ParseException | PrefixDeclarationException e) { + // throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); + // } URL endpointUrl; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index c84af1322..c90c5e5da 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -20,7 +20,9 @@ import java.net.URL; import java.net.MalformedURLException; import java.util.List; +import java.util.Deque; import java.util.ArrayList; +import java.util.ArrayDeque; import java.util.LinkedList; import org.semanticweb.vlog4j.parser.ParsingException; @@ -57,43 +59,61 @@ public class JavaCCParser extends JavaCCParserBase PARSER_END(JavaCCParser) 
+TOKEN_MGR_DECLS : { + // use initializer block to work around auto-generated constructors. + { + states = new ArrayDeque(); + } -void parse() throws PrefixDeclarationException: -{ + Deque states; + + void pushState() { + states.push(curLexState); + } + + void popState() { + SwitchTo(states.pop()); + } } -{ - ( base() )? - ( prefix() )* - ( source() )* - ( statement() )* - < EOF > + + +void parse() throws PrefixDeclarationException : { +} { + ( base() )? + ( prefix() )* + ( source() )* + ( statement() )* + < EOF > } -void base() throws PrefixDeclarationException: -{ - String iriString; +void base() throws PrefixDeclarationException : { + Token iri; +} { + < BASE > iri = < IRI_ABSOLUTE > < DOT > { + prefixDeclarations.setBase(iri.image); + } } -{ - < BASE > iriString = IRIREF() < DOT > - { - prefixDeclarations.setBase(iriString); - } + +void prefix() throws PrefixDeclarationException : { + Token pn; + String iri; +} { + < PREFIX > pn = < PNAME_NS > iri = absoluteIri() < DOT > { + prefixDeclarations.setPrefix(pn.image, iri); + } } -void prefix() throws PrefixDeclarationException: -{ - Token t; - String iriString; +String absoluteIri() throws PrefixDeclarationException : { + Token iri; +} { + iri = < IRI_ABSOLUTE > { return prefixDeclarations.absolutize(iri.image); } + | iri = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(iri.image); } } -{ - ( - LOOKAHEAD(< COLON >) < PREFIX > t = < COLON > iriString = IRIREF() < DOT > - | < PREFIX > t = < PNAME_NS > iriString = IRIREF() < DOT > - ) - { - //note that prefix includes the colon (:) - prefixDeclarations.setPrefix(t.image, iriString); - } + +String quotedIri() throws PrefixDeclarationException : { + String iri; +} { + iri = absoluteIri() { return "<" + iri + ">"; } } void source() throws PrefixDeclarationException: @@ -103,7 +123,7 @@ void source() throws PrefixDeclarationException: Token arity; } { - < SOURCE > predicateName = predicateName() < LBRACK > arity = < INTEGER > < RBRACK > < COLON > dataSource 
= dataSource() < DOT > + < SOURCE > predicateName = predicateName() arity = < ARITY > < COLON > dataSource = dataSource() < DOT > { int nArity; nArity = Integer.parseInt(arity.image); @@ -119,7 +139,7 @@ DataSource dataSource() throws PrefixDeclarationException: List< String > arguments; } { - (sourceName = < DIRECTIVENAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > + (sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > { try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); @@ -246,14 +266,12 @@ List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationExcept { return list; } } -String predicateName() throws PrefixDeclarationException: -{ - String s; +String predicateName() throws PrefixDeclarationException : { Token t; -} -{ - s = IRI(false) { return s; } -| t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } + String s; +} { + s = absoluteIri() { return s; } + | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } } Term term(FormulaContext context) throws PrefixDeclarationException: @@ -285,11 +303,11 @@ Term term(FormulaContext context) throws PrefixDeclarationException: return Expressions.makeExistentialVariable(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } -| try { - c = ConfigurableLiteral () { return c; } - } catch (ParsingException e) { - throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); - } +// | try { +// c = ConfigurableLiteral () { return c; } +// } catch (ParsingException e) { +// throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); +// } } /** [16] */ @@ -314,41 +332,41 @@ Constant RDFLiteral() throws PrefixDeclarationException: { return createConstant(lex, lang, dt); } } -Constant ConfigurableLiteral() throws 
ParsingException: -{ - Token t; -} -{ - ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) - t = < PIPE_DELIMINATED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, - stripDelimiters(t.image, 1), - getSubParserFactory()); - } - | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) - t = < HASH_DELIMINATED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, - stripDelimiters(t.image, 1), - getSubParserFactory()); - } - // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, - // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) - // t = < BRACKET_DELIMINATED_LITERAL > { - // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, - // stripDelimiters(t.image, 1), - // getSubParserFactory()); - // } - | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) - t = < BRACE_DELIMINATED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, - stripDelimiters(t.image, 1), - getSubParserFactory()); - } - ) -} +// Constant ConfigurableLiteral() throws ParsingException: +// { +// Token t; +// } +// { +// ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, +// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) +// t = < PIPE_DELIMINATED_LITERAL > { +// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, +// stripDelimiters(t.image, 1), +// getSubParserFactory()); +// } +// | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, +// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) +// t = < 
HASH_DELIMINATED_LITERAL > { +// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, +// stripDelimiters(t.image, 1), +// getSubParserFactory()); +// } +// // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, +// // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) +// // t = < BRACKET_DELIMINATED_LITERAL > { +// // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, +// // stripDelimiters(t.image, 1), +// // getSubParserFactory()); +// // } +// | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, +// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) +// t = < BRACE_DELIMINATED_LITERAL > { +// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, +// stripDelimiters(t.image, 1), +// getSubParserFactory()); +// } +// ) +// } String Langtag() : { @@ -371,10 +389,10 @@ String String(): String lex; } { - ( t = < STRING_LITERAL1 > { lex = stripDelimiters(t.image, 1); } - | t = < STRING_LITERAL2 > { lex = stripDelimiters(t.image, 1); } - | t = < STRING_LITERAL_LONG1 > { lex = stripDelimiters(t.image, 3); } - | t = < STRING_LITERAL_LONG2 > { lex = stripDelimiters(t.image, 3); } + ( t = < SINGLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } + | t = < DOUBLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } + | t = < TRIPLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } + | t = < SIXFOLD_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } ) { lex = unescapeStr(lex, t.beginLine, t.beginColumn); @@ -388,7 +406,7 @@ LinkedList< String > Arguments() throws PrefixDeclarationException: LinkedList< String > rest = new LinkedList< String >(); } { - (str = String() | str = IRI(true)) [< COMMA > rest = Arguments()] + (str = String() | str = quotedIri()) [< COMMA > rest = Arguments()] { rest.addFirst(str); return rest; @@ -401,7 +419,7 @@ String IRI(boolean includeAngleBrackets) 
throws PrefixDeclarationException: } { ( - iri = IRIREF() + iri = IRIREF() | iri = PrefixedName() ) { @@ -439,195 +457,152 @@ String IRIREF() : } // ------------------------------------------ + // Whitespace -SKIP : -{ - " " -| "\t" -| "\n" -| "\r" -| "\f" +< * > SKIP : { + < WHITESPACE : [ " ", "\t", "\n", "\r", "\f" ] > } -//Comments -SKIP :{< "%" (~["\n"])* "\n" >} +// Comments +< * > SKIP : { + < COMMENT : "%" ( ~[ "\n" ] )* "\n" > +} // ------------------------------------------ -TOKEN : -{ - < PREFIX : "@prefix" > -| < BASE : "@base" > -| < SOURCE : "@source" > +MORE : { + "@": DIRECTIVE +} + +< DEFAULT, TERM, DIRECTIVE_ARGUMENTS > MORE : { + "<" { pushState(); } : ABSOLUTE_IRI +} + +< DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS > TOKEN : { + < VARORPREDNAME : < A2Z> (< A2ZN >)* > + | < #A2Z : [ "a"-"z", "A"-"Z" ] > + | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > + | < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > + | < PNAME_NS : < PN_PREFIX > ":" > + | < #PN_CHARS_BASE : [ "a"-"z", "A"-"Z", "\u00c0"-"\u00d6", + "\u00d8"-"\u00f6", "\u00f8"-"\u02ff", + "\u0370"-"\u037d", "\u037f"-"\u1fff", + "\u200c"-"\u200d", "\u2070"-"\u218f", + "\u2c00"-"\u2fef", "\u3001"-"\ud7ff", + "\uf900"-"\ufffd" ] > + | < #PN_CHARS_U : < PN_CHARS_BASE > | "_" > + | < #PN_CHARS : ( < PN_CHARS_U > | [ "-", "0"-"9", "\u00b7", + "\u0300"-"\u036f", + "\u203f"-"\u2040" ] ) > + | < #PN_PREFIX : < PN_CHARS_BASE > + ( ( < PN_CHARS > | "." )* < PN_CHARS > )? > + | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) + ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? > + | < COMMA : "," > + | < LPAREN : "(" > { + pushState(); + + if (curLexState == DEFAULT || curLexState == BODY) { + SwitchTo(TERM); + } + } + | < RPAREN : ")" > { popState(); } } -TOKEN: -{ - < INTEGER : ([ "-", "+" ])? < DIGITS > > -| < DECIMAL : - ([ "-", "+" ])? - ( - (< DIGITS >)+ "." (< DIGITS >)* - | "." (< DIGITS >)+ - ) - > -| < DOUBLE : - ([ "+", "-" ])? - ( - ([ "0"-"9" ])+ "." ([ "0"-"9" ])* < EXPONENT > - | "." 
([ "0"-"9" ])+ (< EXPONENT >) - | ([ "0"-"9" ])+ < EXPONENT > - ) - > -| < #DIGITS : ([ "0"-"9" ])+ > -| < #EXPONENT : [ "e", "E" ] ([ "+", "-" ])? ([ "0"-"9" ])+ > +< TERM, DIRECTIVE_ARGUMENTS > TOKEN : { + < INTEGER : (< SIGN >)? < DIGITS > > + | < DECIMAL : (< SIGN >)? ( < DIGITS > "." (< DIGIT >)* + | "." < DIGITS > ) > + | < DOUBLE : (< SIGN >)? ( < DIGITS > "." (< DIGIT >)* < EXPONENT > + | "." (< DIGITS >) (< EXPONENT >) + | < DIGITS > < EXPONENT > ) > + | < #SIGN : [ "+", "-" ] > + | < #DIGIT : [ "0"-"9" ] > + | < #DIGITS : (< DIGIT >)+ > + | < #EXPONENT : [ "e", "E" ] (< SIGN >)? < DIGITS > > + | < COLON : ":" > } -TOKEN: -{ - < STRING_LITERAL1 : - // Single quoted string - "'" - ( - (~[ "'", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "'" > -| < STRING_LITERAL2 : - // Double quoted string - "\"" - ( - (~[ "\"", "\\", "\n", "\r" ]) - | < ECHAR > - )* - "\"" > -| < STRING_LITERAL_LONG1 : - "'''" - ( - ~[ "'", "\\" ] - | < ECHAR > - | ("'" ~[ "'" ]) - | ("''" ~[ "'" ]) - )* - "'''" > -| < STRING_LITERAL_LONG2 : - "\"\"\"" - ( - ~[ "\"", "\\" ] - | < ECHAR > - | ("\"" ~[ "\"" ]) - | ("\"\"" ~[ "\"" ]) - )* - "\"\"\"" > -| < #ECHAR : - "\\" - ( - "t" - | "b" - | "n" - | "r" - | "f" - | "\\" - | "\"" - | "'" - ) > -} - -TOKEN : -{ - // Includes # for relative URIs - < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > -| < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > -| < PNAME_NS : < PN_PREFIX > ":" > -| < UNIVAR : < QMARK > < VARORPREDNAME > > -| < EXIVAR : < EMARK > < VARORPREDNAME > > -| < LANGTAG : - < AT > (< A2Z >)+ - ( - "-" (< A2ZN >)+ - )* > -| < VARORPREDNAME : < A2Z> (< A2ZN >)* > -| < #A2Z : [ "a"-"z", "A"-"Z" ] > -| < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > -| < DIRECTIVENAME : < A2Z > (< A2ZNX >)* > -| < #A2ZNX : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > -} - -TOKEN : -{ - < LPAREN : "(" > -| < RPAREN : ")" > -| < LBRACK : "[" > -| < RBRACK : "]" > -| < COMMA : "," > -| < DOT : "." 
> -| < ARROW : ":-" > -| < QMARK : "?" > -| < EMARK : "!" > -| < TILDE : "~" > -| < COLON : ":" > -| < DATATYPE : "^^" > -| < AT : "@" > -} - -TOKEN : -{ - < #PN_CHARS_BASE : - [ "A"-"Z" ] - | [ "a"-"z" ] - | [ "\u00c0"-"\u00d6" ] - | [ "\u00d8"-"\u00f6" ] - | [ "\u00f8"-"\u02ff" ] - | [ "\u0370"-"\u037d" ] - | [ "\u037f"-"\u1fff" ] - | [ "\u200c"-"\u200d" ] - | [ "\u2070"-"\u218f" ] - | [ "\u2c00"-"\u2fef" ] - | [ "\u3001"-"\ud7ff" ] - | [ "\uf900"-"\ufffd" ] - > - // | [ ""#x10000-#xEFFFF] -| - < #PN_CHARS_U : - < PN_CHARS_BASE > - | "_" > -| < #PN_CHARS : - ( - < PN_CHARS_U > - | "-" - | [ "0"-"9" ] - | "\u00b7" - | [ "\u0300"-"\u036f" ] - | [ "\u203f"-"\u2040" ] - ) > -| < #PN_PREFIX : - < PN_CHARS_BASE > - ( - ( - < PN_CHARS > - | "." - )* - < PN_CHARS > - )? > -| < #PN_LOCAL : - ( - < PN_CHARS_U > - | ":" - | [ "0"-"9" ] - ) - ( - ( - < PN_CHARS > - | "." - | ":" - )* - < PN_CHARS > - )? > +TOKEN : { + < ARROW : ":-" > : BODY } -TOKEN : -{ - < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT -| < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT -// | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT -| < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT +< DEFAULT, BODY > TOKEN : { + < TILDE : "~" > } + +< ABSOLUTE_IRI > TOKEN : { + < IRI_ABSOLUTE : (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > { + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + popState(); + } +} + +< DIRECTIVE > TOKEN : { + < BASE : "base" > : DIRECTIVE_ARGUMENTS + | < PREFIX : "prefix" > : DIRECTIVE_ARGUMENTS + | < SOURCE : "source" > : DIRECTIVE_ARGUMENTS + | < CUSTOM : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS + | < DIRECTIVENAME : [ "a"-"z", "A"-"Z" ] ([ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ])* > +} + +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { + < DOT : "." 
> : DEFAULT +} + +< DIRECTIVE_ARGUMENTS > TOKEN : { + < ARITY : "[" < INTEGER > "]" > { + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } + | < ARGUMENT_NAME : < DIRECTIVENAME > > +} + +< TERM > TOKEN : { + < UNIVAR : "?" < VARORPREDNAME > > + | < EXIVAR : "!" < VARORPREDNAME > > + | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > + | < DATATYPE : "^^" > + | < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > +} + +< TERM, DIRECTIVE_ARGUMENTS > MORE : { + < "'" > { pushState(); } : SINGLE_QUOTED + | < "\"" > { pushState(); } : DOUBLE_QUOTED + | < "'''" > { pushState(); }: TRIPLE_QUOTED + | < "\"\"\"" > { pushState(); } : SIXFOLD_QUOTED +} + +< SINGLE_QUOTED > TOKEN : { + < SINGLE_QUOTED_STRING : ( ~[ "'", "\\", "\n", "\r" ] + | < ESCAPE_SEQUENCE > )* "'" > { popState(); } +} + +< DOUBLE_QUOTED > TOKEN : { + < DOUBLE_QUOTED_STRING : ( ~[ "\"", "\\", "\n", "\r" ] + | < ESCAPE_SEQUENCE > )* "\"" > { popState(); } +} + +< TRIPLE_QUOTED > TOKEN : { + < TRIPLE_QUOTED_STRING : ( ~[ "'", "\\" ] + | < ESCAPE_SEQUENCE > + | ( "'" ~[ "'" ] ) + | ( "''" ~[ "'" ] ) )* "'''" > { popState(); } +} + +< SIXFOLD_QUOTED > TOKEN : { + < SIXFOLD_QUOTED_STRING : ( ~[ "\"", "\\" ] + | < ESCAPE_SEQUENCE > + | ( "\"" ~[ "\"" ] ) + | ( "\"\"" ~[ "\"" ] ) )* "\"\"\"" > { popState(); } +} + +< SINGLE_QUOTED, DOUBLE_QUOTED, TRIPLE_QUOTED, SIXFOLD_QUOTED > MORE : { + < ESCAPE_SEQUENCE : "\\" [ "t", "b", "n", "r", "f", "\\", "\"", "'" ] > +} + +// TOKEN : +// { +// < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT +// | < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT +// // | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT +// | < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT +// } From e3fb878830e2129772471f13cfe5e096e66609c9 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 29 Jan 2020 17:08:37 +0100 Subject: [PATCH 0773/1255] Parser: 
Simplify constant handling, support configurable literals --- .../vlog4j/parser/ParserConfiguration.java | 19 +- ...eryResultDataSourceDeclarationHandler.java | 12 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 423 ++++++++---------- .../parser/javacc/JavaCCParserBase.java | 17 +- .../parser/ParserConfigurationTest.java | 7 - .../RuleParserConfigurableLiteralTest.java | 55 ++- 6 files changed, 251 insertions(+), 282 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 3e89c30ea..096a1a06c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -106,32 +106,17 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin } /** - * Parse a constant with optional data type and language tag. + * Parse a constant with optional data type. * * @param lexicalForm the (unescaped) lexical form of the constant. * @param languageTag the language tag, or null if not present. * @param the datatype, or null if not present. - * @pre At most one of {@code languageTag} and {@code datatype} may be non-null. * * @throws ParsingException when the lexical form is invalid for the * given data type. - * @throws IllegalArgumentException when both {@code languageTag} and - * {@code datatype} are non-null. * @return the {@link Constant} corresponding to the given arguments. 
*/ - public Constant parseConstant(final String lexicalForm, final String languageTag, final String datatype) - throws ParsingException, IllegalArgumentException { - Validate.isTrue((languageTag == null) || (datatype == null), - "A constant with a language tag may not explicitly specify a data type."); - - if (languageTag != null) { - return Expressions.makeLanguageStringConstant(lexicalForm, languageTag); - } else { - return this.parseDatatypeConstant(lexicalForm, datatype); - } - } - - private Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { + public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { final String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); final DatatypeConstantHandler handler = this.datatypes.get(type); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index bfed050f4..b49683115 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -47,13 +47,11 @@ public DataSource handleDeclaration(List arguments, final SubParserFacto String endpoint = arguments.get(0); JavaCCParser parser = subParserFactory.makeSubParser(endpoint); String parsedEndpoint; - // try { - /// @TODO: actually make sure that this is a valid IRI - parsedEndpoint = endpoint.substring(1, endpoint.length() - 1); - //parsedEndpoint = parser.quotedIri(); - // } catch (ParseException | PrefixDeclarationException e) { - // throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); - // } + 
try { + parsedEndpoint = parser.absoluteIri(); + } catch (ParseException | PrefixDeclarationException e) { + throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); + } URL endpointUrl; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index c90c5e5da..810cd61d1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -116,31 +116,27 @@ String quotedIri() throws PrefixDeclarationException : { iri = absoluteIri() { return "<" + iri + ">"; } } -void source() throws PrefixDeclarationException: -{ +void source() throws PrefixDeclarationException : { String predicateName; DataSource dataSource; Token arity; -} -{ +} { < SOURCE > predicateName = predicateName() arity = < ARITY > < COLON > dataSource = dataSource() < DOT > { int nArity; - nArity = Integer.parseInt(arity.image); // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java! 
+ nArity = Integer.parseInt(arity.image); addDataSource(predicateName, nArity, dataSource); } } -DataSource dataSource() throws PrefixDeclarationException: -{ +DataSource dataSource() throws PrefixDeclarationException : { Token sourceName; List< String > arguments; -} -{ - (sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME >) < LPAREN > arguments = Arguments() < RPAREN > - { +} { + ( sourceName = < ARGUMENT_NAME > + | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } catch (ParsingException e) { @@ -149,27 +145,19 @@ DataSource dataSource() throws PrefixDeclarationException: } } -void statement() throws PrefixDeclarationException: -{ +void statement() throws PrefixDeclarationException : { Statement statement; resetVariableSets(); -} -{ - LOOKAHEAD(rule()) statement = rule() { knowledgeBase.addStatement(statement);} -| statement = fact(FormulaContext.HEAD) //not from a rule - { - knowledgeBase.addStatement(statement); - } +} { + ( LOOKAHEAD(rule()) statement = rule() + | statement = fact(FormulaContext.HEAD) ) { knowledgeBase.addStatement(statement); } } -Rule rule() throws PrefixDeclarationException: -{ +Rule rule() throws PrefixDeclarationException : { List < PositiveLiteral > head; List < Literal > body; -} -{ - head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > - { +} { + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) < DOT > { // check that the intersection between headExiVars and BodyVars is empty for (String variable : headExiVars) { if (bodyVars.contains(variable)) @@ -186,57 +174,49 @@ Rule rule() throws PrefixDeclarationException: } } -List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException: -{ +List < 
PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException : { PositiveLiteral l; List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); -} -{ - l = positiveLiteral(context) { list.add(l); } - ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* - { return list; } +} { + l = positiveLiteral(context) { list.add(l); } ( < COMMA > l = positiveLiteral(context) { list.add(l); } )* { + return list; + } } -List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException: -{ +List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException : { Literal l; List < Literal > list = new ArrayList < Literal > (); -} -{ - l = literal(context) { list.add(l); } - ( < COMMA > l = literal(context) { list.add(l); } )* - { return list; } +} { + l = literal(context) { list.add(l); } ( < COMMA > l = literal(context) { list.add(l); } )* { + return list; + } } -Literal literal(FormulaContext context) throws PrefixDeclarationException: -{ - Literal l = null; -} -{ - l = positiveLiteral(context) { return l; } -| l = negativeLiteral(context) { return l; } +Literal literal(FormulaContext context) throws PrefixDeclarationException : { + Literal l; +} { + ( l = positiveLiteral(context) + | l = negativeLiteral(context) ) { + return l; + } } -PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException: -{ +PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException : { Token t; List < Term > terms; String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makePositiveLiteral(predicateName, terms); } +} { + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { + return Expressions.makePositiveLiteral(predicateName, terms); + } } -Fact fact(FormulaContext context) throws PrefixDeclarationException: -{ +Fact 
fact(FormulaContext context) throws PrefixDeclarationException : { Token t; List < Term > terms; String predicateName; -} -{ - predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > - { +} { + predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > { try { return Expressions.makeFact(predicateName, terms); } catch (IllegalArgumentException e) { @@ -245,25 +225,22 @@ Fact fact(FormulaContext context) throws PrefixDeclarationException: } } -NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException: -{ +NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException : { List < Term > terms; String predicateName; -} -{ - < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > - { return Expressions.makeNegativeLiteral(predicateName, terms); } +} { + < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > { + return Expressions.makeNegativeLiteral(predicateName, terms); + } } -List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException: -{ +List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException : { Term t; List < Term > list = new ArrayList < Term > (); -} -{ - t = term(context) { list.add(t); } - ( < COMMA > t = term(context) { list.add(t); } )* - { return list; } +} { + t = term(context) { list.add(t); } ( < COMMA > t = term(context) { list.add(t); } )* { + return list; + } } String predicateName() throws PrefixDeclarationException : { @@ -274,18 +251,16 @@ String predicateName() throws PrefixDeclarationException : { | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } } -Term term(FormulaContext context) throws PrefixDeclarationException: -{ +Term term(FormulaContext context) throws PrefixDeclarationException : { Token t; String s; Constant c; -} -{ - s = IRI(false) { return createConstant(s); 
} -| c = NumericLiteral() { return c; } -| c = RDFLiteral() { return c; } -| t = < UNIVAR > - { +} { + s = absoluteIri() { return createConstant(s); } + | t = < VARORPREDNAME > { return createConstant(t.image); } + | c = NumericLiteral() { return c; } + | c = RDFLiteral() { return c; } + | t = < UNIVAR > { s = t.image.substring(1); if (context == FormulaContext.HEAD) headUniVars.add(s); @@ -293,8 +268,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException: bodyVars.add(s); return Expressions.makeUniversalVariable(s); } -| t = < EXIVAR > - { + | t = < EXIVAR > { s = t.image.substring(1); if (context == FormulaContext.HEAD) headExiVars.add(s); @@ -302,158 +276,96 @@ Term term(FormulaContext context) throws PrefixDeclarationException: throw new ParseException("Existentialy quantified variables can not appear in the body. Line: " + t.beginLine + ", Column: "+ t.beginColumn); return Expressions.makeExistentialVariable(s); } -| t = < VARORPREDNAME > { return createConstant(t.image); } -// | try { -// c = ConfigurableLiteral () { return c; } -// } catch (ParsingException e) { -// throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); -// } + | try { + c = ConfigurableLiteral () { return c; } + } catch (ParsingException e) { + throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); + } } -/** [16] */ -Constant NumericLiteral() : -{ +Constant NumericLiteral() : { Token t; -} -{ +} { t = < INTEGER > { return createConstant(t.image, PrefixDeclarations.XSD_INTEGER); } -| t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } -| t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } + | t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } + | t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } } -Constant RDFLiteral() throws PrefixDeclarationException: -{ - String lex = null; - 
String lang = null; // Optional lang tag and datatype. +Constant RDFLiteral() throws PrefixDeclarationException : { + String lex; + Token lang = null; // Optional lang tag and datatype. String dt = null; -} -{ - lex = String() ( lang = Langtag() | < DATATYPE > dt = IRI(false) )? - { return createConstant(lex, lang, dt); } -} - -// Constant ConfigurableLiteral() throws ParsingException: -// { -// Token t; -// } -// { -// ( LOOKAHEAD( < PIPE_DELIMINATED_LITERAL >, -// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) -// t = < PIPE_DELIMINATED_LITERAL > { -// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, -// stripDelimiters(t.image, 1), -// getSubParserFactory()); -// } -// | LOOKAHEAD( < HASH_DELIMINATED_LITERAL >, -// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) -// t = < HASH_DELIMINATED_LITERAL > { -// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, -// stripDelimiters(t.image, 1), -// getSubParserFactory()); -// } -// // | LOOKAHEAD( < BRACKET_DELIMINATED_LITERAL >, -// // { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) -// // t = < BRACKET_DELIMINATED_LITERAL > { -// // return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, -// // stripDelimiters(t.image, 1), -// // getSubParserFactory()); -// // } -// | LOOKAHEAD( < BRACE_DELIMINATED_LITERAL >, -// { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) -// t = < BRACE_DELIMINATED_LITERAL > { -// return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, -// stripDelimiters(t.image, 1), -// getSubParserFactory()); -// } -// ) -// } - -String Langtag() : -{ - Token t; -} -{ - // Enumerate the directives here because they look like language tags. 
- ( - t = < LANGTAG > - ) - { - String lang = stripChars(t.image, 1); - return lang; - } +} { + lex = String() ( lang = < LANGTAG > | < DATATYPE > dt = absoluteIri() )? { + if (lang != null) { + return Expressions.makeLanguageStringConstant(lex, lang.image); + } + return createConstant(lex, dt); + } } -String String(): -{ +Constant ConfigurableLiteral() throws ParsingException : { + Token t; +} { + ( LOOKAHEAD( < PIPE_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) + t = < PIPE_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < HASH_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) + t = < HASH_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < PAREN_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) + t = < PAREN_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < BRACE_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) + t = < BRACE_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, + getSubParserFactory()); + } + | LOOKAHEAD( < BRACKET_DELIMITED_LITERAL >, + { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) + t = < BRACKET_DELIMITED_LITERAL > { + return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, + getSubParserFactory()); + } + + ) +} + +String String() : { Token t; - String lex; -} -{ - ( t = < 
SINGLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } - | t = < DOUBLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 1); } - | t = < TRIPLE_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } - | t = < SIXFOLD_QUOTED_STRING > { lex = stripDelimiters(t.image, 3); } - ) - { - lex = unescapeStr(lex, t.beginLine, t.beginColumn); - return lex; - } +} { + ( t = < SINGLE_QUOTED_STRING > + | t = < DOUBLE_QUOTED_STRING > + | t = < TRIPLE_QUOTED_STRING > + | t = < SIXFOLD_QUOTED_STRING > + ) { return unescapeStr(t.image, t.beginLine, t.beginColumn); } } -LinkedList< String > Arguments() throws PrefixDeclarationException: -{ +LinkedList< String > Arguments() throws PrefixDeclarationException : { String str; LinkedList< String > rest = new LinkedList< String >(); -} -{ - (str = String() | str = quotedIri()) [< COMMA > rest = Arguments()] - { +} { + ( str = String() + | str = quotedIri()) [< COMMA > rest = Arguments()] { rest.addFirst(str); return rest; } } -String IRI(boolean includeAngleBrackets) throws PrefixDeclarationException: -{ - String iri; -} -{ - ( - iri = IRIREF() - | iri = PrefixedName() - ) - { - String result = prefixDeclarations.absolutize(iri); - if (includeAngleBrackets) { - result = "<"+result+">"; - } - return result; - } -} - -String PrefixedName() throws PrefixDeclarationException: -{ - Token t; -} -{ - //( - t = < PNAME_LN > - //| t = < PNAME_NS > - //) - { return prefixDeclarations.resolvePrefixedName(t.image);} - //{ return prefixDeclarations.resolvePrefixedName(t.image, t.beginLine, t.beginColumn);} -} - -String IRIREF() : -{ +String PrefixedName() throws PrefixDeclarationException : { Token t; -} -{ - t = < IRI > - { - // we remove '<' and '>' - return stripDelimiters(t.image, 1); - } +} { + t = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(t.image); } } // ------------------------------------------ @@ -468,7 +380,6 @@ String IRIREF() : < COMMENT : "%" ( ~[ "\n" ] )* "\n" > } -// ------------------------------------------ 
MORE : { "@": DIRECTIVE } @@ -498,14 +409,17 @@ MORE : { | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? > | < COMMA : "," > - | < LPAREN : "(" > { + | < RPAREN : ")" > { popState(); } +} + +< DEFAULT, BODY , DIRECTIVE_ARGUMENTS > TOKEN : { + < LPAREN : "(" > { pushState(); if (curLexState == DEFAULT || curLexState == BODY) { SwitchTo(TERM); } } - | < RPAREN : ")" > { popState(); } } < TERM, DIRECTIVE_ARGUMENTS > TOKEN : { @@ -559,9 +473,10 @@ TOKEN : { < TERM > TOKEN : { < UNIVAR : "?" < VARORPREDNAME > > | < EXIVAR : "!" < VARORPREDNAME > > - | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > + | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 1); + } | < DATATYPE : "^^" > - | < IRI : "<" (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > } < TERM, DIRECTIVE_ARGUMENTS > MORE : { @@ -569,40 +484,84 @@ TOKEN : { | < "\"" > { pushState(); } : DOUBLE_QUOTED | < "'''" > { pushState(); }: TRIPLE_QUOTED | < "\"\"\"" > { pushState(); } : SIXFOLD_QUOTED + | < "|" > { pushState(); } : PIPE_DELIMITED + | < "#" > { pushState(); } : HASH_DELIMITED + | < < LPAREN > > { pushState(); } : PAREN_DELIMITED + | < "{" > { pushState(); } : BRACE_DELIMITED + | < "[" > { pushState(); } : BRACKET_DELIMITED +} + +< PIPE_DELIMITED > TOKEN : { + < PIPE_DELIMITED_LITERAL : ( ~ [ "|" ] )* "|" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< HASH_DELIMITED > TOKEN : { + < HASH_DELIMITED_LITERAL : ( ~ [ "#" ] )* "#" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< PAREN_DELIMITED > TOKEN : { + < PAREN_DELIMITED_LITERAL : ( ~ [ ")" ] )* ")" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< BRACE_DELIMITED > TOKEN : { + < BRACE_DELIMITED_LITERAL : ( ( 
~ [ "}" ] | [ "}" ] ) )* "}" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< BRACKET_DELIMITED > TOKEN : { + < BRACKET_DELIMITED_LITERAL : ( ( ~ [ "]" ] | [ ")" ] ) )* "]" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } } < SINGLE_QUOTED > TOKEN : { < SINGLE_QUOTED_STRING : ( ~[ "'", "\\", "\n", "\r" ] - | < ESCAPE_SEQUENCE > )* "'" > { popState(); } + | < ESCAPE_SEQUENCE > )* "'" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } } < DOUBLE_QUOTED > TOKEN : { < DOUBLE_QUOTED_STRING : ( ~[ "\"", "\\", "\n", "\r" ] - | < ESCAPE_SEQUENCE > )* "\"" > { popState(); } + | < ESCAPE_SEQUENCE > )* "\"" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } } < TRIPLE_QUOTED > TOKEN : { < TRIPLE_QUOTED_STRING : ( ~[ "'", "\\" ] | < ESCAPE_SEQUENCE > | ( "'" ~[ "'" ] ) - | ( "''" ~[ "'" ] ) )* "'''" > { popState(); } + | ( "''" ~[ "'" ] ) )* "'''" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 3); + } } < SIXFOLD_QUOTED > TOKEN : { < SIXFOLD_QUOTED_STRING : ( ~[ "\"", "\\" ] | < ESCAPE_SEQUENCE > | ( "\"" ~[ "\"" ] ) - | ( "\"\"" ~[ "\"" ] ) )* "\"\"\"" > { popState(); } + | ( "\"\"" ~[ "\"" ] ) )* "\"\"\"" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 3); + } } < SINGLE_QUOTED, DOUBLE_QUOTED, TRIPLE_QUOTED, SIXFOLD_QUOTED > MORE : { < ESCAPE_SEQUENCE : "\\" [ "t", "b", "n", "r", "f", "\\", "\"", "'" ] > } - -// TOKEN : -// { -// < PIPE_DELIMINATED_LITERAL : ( "|" (~[ "|" ])* "|" ) > : DEFAULT -// | < HASH_DELIMINATED_LITERAL : ( "#" (~[ "#" ])* "#" ) > : DEFAULT -// // | < BRACKET_DELIMINATED_LITERAL : ( "[" (~[ "]" ])* "]" ) > : DEFAULT -// | < BRACE_DELIMINATED_LITERAL : ( "{" (~[ "}" ])* "}" ) > : DEFAULT -// } diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index f55801423..436c49f8f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -109,13 +109,17 @@ public enum ConfigurableLiteralDelimiter { */ HASH, /** - * Literals of the form {@code […]} + * Literals of the form {@code (…)} */ - BRACKET, + PAREN, /** * Literals of the form {@code {…}} */ BRACE, + /** + * Literals of the form {@code […]} + */ + BRACKET, } public JavaCCParserBase() { @@ -134,21 +138,16 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { return Expressions.makeAbstractConstant(absoluteIri); } - Constant createConstant(String lexicalForm, String datatype) throws ParseException { - return createConstant(lexicalForm, null, datatype); - } - /** * Creates a suitable {@link Constant} from the parsed data. 
* * @param string the string data (unescaped) - * @param languageTag the language tag, or null if not present * @param datatype the datatype, or null if not provided * @return suitable constant */ - Constant createConstant(String lexicalForm, String languageTag, String datatype) throws ParseException { + Constant createConstant(String lexicalForm, String datatype) throws ParseException { try { - return parserConfiguration.parseConstant(lexicalForm, languageTag, datatype); + return parserConfiguration.parseDatatypeConstant(lexicalForm, datatype); } catch (ParsingException e) { throw makeParseExceptionWithCause("Failed to parse Constant", e); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java index d509fe7f4..f89c5f012 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java @@ -63,11 +63,4 @@ public void registerDatatype_dataSourceName_succeeds() { parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler).registerDatatype(SOURCE_NAME, datatypeConstantHandler); } - - @Test(expected = IllegalArgumentException.class) - public void parseConstant_languageTagWithExplictDatatype_throws() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.parseConstant("test", "test", "test"); - } - } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index ff6de9d2e..fa58b9d5a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -44,17 +44,20 @@ public class RuleParserConfigurableLiteralTest { public static final Constant pipeConstant = Expressions.makeAbstractConstant("testPipe"); public static final Constant hashConstant = Expressions.makeAbstractConstant("testHash"); - public static final Constant bracketConstant = Expressions.makeAbstractConstant("testBracket"); + public static final Constant parenConstant = Expressions.makeAbstractConstant("testParen"); public static final Constant braceConstant = Expressions.makeAbstractConstant("testBrace"); + public static final Constant bracketConstant = Expressions.makeAbstractConstant("testBracket"); public static final ConfigurableLiteralHandler pipeHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.PIPE, pipeConstant); public static final ConfigurableLiteralHandler hashHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.HASH, hashConstant); - public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( - ConfigurableLiteralDelimiter.BRACKET, bracketConstant); + public static final ConfigurableLiteralHandler parenHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.PAREN, parenConstant); public static final ConfigurableLiteralHandler braceHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.BRACE, braceConstant); + public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACKET, bracketConstant); @Test(expected = ParsingException.class) public void testNoDefaultPipeLiteral() throws ParsingException { @@ -93,12 +96,11 @@ public void testCustomHashLiteral() throws ParsingException { } @Test - @Ignore - public void testCustomBracketLiteral() throws ParsingException { + public void testCustomParenLiteral() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); - 
parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); - Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); - assertEquals(bracketConstant, result.getConstants().toArray()[0]); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler); + Literal result = RuleParser.parseLiteral("p((test))", parserConfiguration); + assertEquals(parenConstant, result.getConstants().toArray()[0]); } @Test @@ -110,14 +112,21 @@ public void testCustomBraceLiteral() throws ParsingException { } @Test - @Ignore + public void testCustomBracketLiteral() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); + assertEquals(bracketConstant, result.getConstants().toArray()[0]); + } + + @Test public void testMixedCustomLiterals() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler) .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); - Literal result = RuleParser.parseLiteral("p(||, #test#, [[], {})", parserConfiguration); + Literal result = RuleParser.parseLiteral("p(||, #test#, [], {})", parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( Arrays.asList(pipeConstant, hashConstant, bracketConstant, braceConstant)); @@ -135,6 +144,32 @@ public void testNonTrivialCustomPipeLiteral() throws ParsingException { assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } + @Test + public void testNestedBraceLiteral() throws ParsingException 
{ + String label = "this is a test, do not worry."; + String input = "p({{" + label + "}})"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant("{" + label + "}"), result.getConstants().toArray()[0]); + } + + @Test + public void testMixedAndNestedCustomLiterals() throws ParsingException { + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p(|{}|, #test#, [|test, #test#, test|], ([], {}, [{[{}]}]))", parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(pipeConstant, hashConstant, bracketConstant, parenConstant)); + assertEquals(expected, constants); + } + static Constant makeReversedConstant(String name) { StringBuilder builder = new StringBuilder(name); return Expressions.makeAbstractConstant(builder.reverse().toString()); From 5964d523e557116709046df32e5a51b53b3c97c9 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 29 Jan 2020 17:38:36 +0100 Subject: [PATCH 0774/1255] Parser: Separate JavaCCParserBase internals from parsing code --- .../vlog4j/parser/javacc/JavaCCParser.jj | 53 ++++++++---------- .../parser/javacc/JavaCCParserBase.java | 55 +++++++++++++++++-- .../parser/javacc/SubParserFactory.java | 12 ++-- 3 files changed, 79 insertions(+), 41 
deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 810cd61d1..68c5a8b7e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -90,7 +90,7 @@ void base() throws PrefixDeclarationException : { Token iri; } { < BASE > iri = < IRI_ABSOLUTE > < DOT > { - prefixDeclarations.setBase(iri.image); + setBase(iri.image); } } @@ -99,15 +99,15 @@ void prefix() throws PrefixDeclarationException : { String iri; } { < PREFIX > pn = < PNAME_NS > iri = absoluteIri() < DOT > { - prefixDeclarations.setPrefix(pn.image, iri); + setPrefix(pn.image, iri); } } String absoluteIri() throws PrefixDeclarationException : { Token iri; } { - iri = < IRI_ABSOLUTE > { return prefixDeclarations.absolutize(iri.image); } - | iri = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(iri.image); } + iri = < IRI_ABSOLUTE > { return absolutizeIri(iri.image); } + | iri = < PNAME_LN > { return resolvePrefixedName(iri.image); } } String quotedIri() throws PrefixDeclarationException : { @@ -137,11 +137,7 @@ DataSource dataSource() throws PrefixDeclarationException : { } { ( sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { - try { - return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); - } catch (ParsingException e) { - throw makeParseExceptionWithCause("Failed while trying to parse the source-specific part of a data source declaration", e); - } + return parseDataSourceSpecificPartOfDataSourceDeclaration(sourceName.image, arguments, getSubParserFactory()); } } @@ -150,7 +146,9 @@ void statement() throws PrefixDeclarationException : { resetVariableSets(); } { ( LOOKAHEAD(rule()) statement = 
rule() - | statement = fact(FormulaContext.HEAD) ) { knowledgeBase.addStatement(statement); } + | statement = fact(FormulaContext.HEAD) ) { + addStatement(statement); + } } Rule rule() throws PrefixDeclarationException : { @@ -248,13 +246,14 @@ String predicateName() throws PrefixDeclarationException : { String s; } { s = absoluteIri() { return s; } - | t = < VARORPREDNAME > { return prefixDeclarations.absolutize(t.image); } + | t = < VARORPREDNAME > { return absolutizeIri(t.image); } } Term term(FormulaContext context) throws PrefixDeclarationException : { Token t; String s; Constant c; + Term tt; } { s = absoluteIri() { return createConstant(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } @@ -277,7 +276,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { return Expressions.makeExistentialVariable(s); } | try { - c = ConfigurableLiteral () { return c; } + tt = ConfigurableLiteral () { return tt; } } catch (ParsingException e) { throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); } @@ -304,40 +303,34 @@ Constant RDFLiteral() throws PrefixDeclarationException : { } } -Constant ConfigurableLiteral() throws ParsingException : { +Term ConfigurableLiteral() throws ParsingException : { Token t; } { ( LOOKAHEAD( < PIPE_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) t = < PIPE_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, t.image, getSubParserFactory()); } | LOOKAHEAD( < HASH_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) t = < 
HASH_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, getSubParserFactory()); } | LOOKAHEAD( < PAREN_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) t = < PAREN_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, getSubParserFactory()); } | LOOKAHEAD( < BRACE_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) t = < BRACE_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, getSubParserFactory()); } | LOOKAHEAD( < BRACKET_DELIMITED_LITERAL >, - { parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) t = < BRACKET_DELIMITED_LITERAL > { - return parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, - getSubParserFactory()); + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, getSubParserFactory()); } - ) } @@ -365,7 +358,7 @@ LinkedList< String > Arguments() throws PrefixDeclarationException : { String PrefixedName() throws PrefixDeclarationException : { Token t; } { - t = < PNAME_LN > { return prefixDeclarations.resolvePrefixedName(t.image); } + t = < PNAME_LN > { return 
resolvePrefixedName(t.image); } } // ------------------------------------------ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 436c49f8f..273c9f403 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -21,6 +21,7 @@ */ import java.util.HashSet; +import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; @@ -28,6 +29,8 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -52,10 +55,10 @@ * */ public class JavaCCParserBase { - protected PrefixDeclarations prefixDeclarations; + private PrefixDeclarations prefixDeclarations; - protected KnowledgeBase knowledgeBase; - protected ParserConfiguration parserConfiguration; + private KnowledgeBase knowledgeBase; + private ParserConfiguration parserConfiguration; /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -153,6 +156,10 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } } + void addStatement(Statement statement) { + knowledgeBase.addStatement(statement); + } + void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { if (dataSource.getRequiredArity().isPresent()) { Integer requiredArity = dataSource.getRequiredArity().get(); @@ -163,7 +170,7 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw } Predicate predicate = Expressions.makePredicate(predicateName, arity); - knowledgeBase.addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); } static String unescapeStr(String s, int line, int column) throws ParseException { @@ -296,11 +303,47 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - protected void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { + void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; } - protected PrefixDeclarations getPrefixDeclarations() { + PrefixDeclarations getPrefixDeclarations() { return prefixDeclarations; } + + DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, + List arguments, SubParserFactory subParserFactory) throws ParseException { + try { + return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(syntacticForm, arguments, + subParserFactory); + } catch (ParsingException e) { + throw makeParseExceptionWithCause( + "Failed while trying to parse the source-specific part of a data source declaration", e); + } + } + + Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, + SubParserFactory subParserFactory) throws ParsingException { + return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); + } + + boolean 
isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) { + return parserConfiguration.isConfigurableLiteralRegistered(delimiter); + } + + void setBase(String baseIri) throws PrefixDeclarationException { + prefixDeclarations.setBase(baseIri); + } + + void setPrefix(String prefixName, String baseIri) throws PrefixDeclarationException { + prefixDeclarations.setPrefix(prefixName, baseIri); + } + + String absolutizeIri(String iri) throws PrefixDeclarationException { + return prefixDeclarations.absolutize(iri); + } + + String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { + return prefixDeclarations.resolvePrefixedName(prefixedName); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index c4a012baf..c92c4be83 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -30,9 +30,10 @@ import org.semanticweb.vlog4j.parser.RuleParser; /** - * Factory for creating a SubParser sharing configuration, state, and prefixes, - * but with an independent input stream, to be used, e.g., for parsing arguments - * in data source declarations. + * Factory for creating a SubParser sharing configuration, (semantic) + * state, and prefixes, but with an independent input stream, to be + * used, e.g., for parsing arguments in data source declarations. The + * parser will start in the {@code DEFAULT} lexical state. * * @author Maximilian Marx */ @@ -44,7 +45,8 @@ public class SubParserFactory { /** * Construct a SubParserFactory. * - * @param parser the parser instance to get the state from. + * @param parser the parser instance to get the (semantic) state + * from. 
*/ SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); @@ -53,7 +55,7 @@ public class SubParserFactory { } /** - * Create a new parser with the specified state and given input. + * Create a new parser with the specified (semantic) state and given input. * * @param inputStream the input stream to parse. * @param encoding encoding of the input stream. From c150f28bdeb8b060f216179a6d786f8a7a2f63b5 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 29 Jan 2020 20:48:21 +0100 Subject: [PATCH 0775/1255] Parser: Fix handling of nested configurable literals --- .../vlog4j/parser/javacc/JavaCCParser.jj | 11 +- .../RuleParserConfigurableLiteralTest.java | 102 ++++++++++++++++++ 2 files changed, 109 insertions(+), 4 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 68c5a8b7e..f9328127b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -7,7 +7,7 @@ options STATIC = false; // DEBUG_PARSER = true; - // DEBUG_TOKEN_MANAGER = true ; + // DEBUG_TOKEN_MANAGER = true; } PARSER_BEGIN(JavaCCParser) @@ -499,24 +499,27 @@ TOKEN : { } < PAREN_DELIMITED > TOKEN : { - < PAREN_DELIMITED_LITERAL : ( ~ [ ")" ] )* ")" > { + < PAREN_DELIMITED_LITERAL : ( < UNPAREN > ( "(" < UNPAREN > ")" )* )* < UNPAREN > ")" > { popState(); matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); } + | < #UNPAREN : ( ~ [ "(", ")" ] )* > } < BRACE_DELIMITED > TOKEN : { - < BRACE_DELIMITED_LITERAL : ( ( ~ [ "}" ] | [ "}" ] ) )* "}" > { + < BRACE_DELIMITED_LITERAL : ( < UNBRACE > ( "{" < UNBRACE > "}" )* )* < UNBRACE > "}" > { popState(); matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); } + | < #UNBRACE : (~ [ "{", "}" ] )* > } < 
BRACKET_DELIMITED > TOKEN : { - < BRACKET_DELIMITED_LITERAL : ( ( ~ [ "]" ] | [ ")" ] ) )* "]" > { + < BRACKET_DELIMITED_LITERAL : ( < UNBRACKET > ( "[" < UNBRACKET > "]" )* )* < UNBRACKET > "]" > { popState(); matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); } + | < #UNBRACKET : ( ~ [ "[", "]" ] )* > } < SINGLE_QUOTED > TOKEN : { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index fa58b9d5a..65490a2ae 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -144,6 +144,43 @@ public void testNonTrivialCustomPipeLiteral() throws ParsingException { assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } + @Test + public void testNestedParenLiteral() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p(((" + label + ")))"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant("(" + label + ")"), result.getConstants().toArray()[0]); + } + + @Test + public void testMultipleParenLiterals() throws ParsingException { + String input = "p((test), (tset))"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = 
RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void testMultipleNestedParenLiterals() throws ParsingException { + String input = "p(((test)), ((tset)))"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("(test)"), makeReversedConstant("(tset)"))); + assertEquals(expected, constants); + } + @Test public void testNestedBraceLiteral() throws ParsingException { String label = "this is a test, do not worry."; @@ -155,6 +192,71 @@ public void testNestedBraceLiteral() throws ParsingException { assertEquals(makeReversedConstant("{" + label + "}"), result.getConstants().toArray()[0]); } + @Test + public void testMultipleBraceLiterals() throws ParsingException { + String input = "p({test}, {tset})"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void testMultipleNestedBraceLiterals() throws ParsingException { + String 
input = "p({{test}}, {{tset}})"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("{test}"), makeReversedConstant("{tset}"))); + assertEquals(expected, constants); + } + + @Test + public void testNestedBracketLiteral() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p([[" + label + "]])"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant("[" + label + "]"), result.getConstants().toArray()[0]); + } + + @Test + public void testMultipleBracketLiterals() throws ParsingException { + String input = "p([test], [tset])"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void testMultipleNestedBracketLiterals() throws ParsingException { + String input = "p([[test]], [[tset]])"; + + + ParserConfiguration 
parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset]"))); + assertEquals(expected, constants); + } + @Test public void testMixedAndNestedCustomLiterals() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); From f81afd6b36d5dd97bd5f0a2193b3ca59a8f32269 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 30 Jan 2020 18:31:17 +0100 Subject: [PATCH 0776/1255] Parser: Fix handling of nested configurable literals, really --- .../vlog4j/parser/javacc/JavaCCParser.jj | 116 +++++++++++++----- .../RuleParserConfigurableLiteralTest.java | 19 ++- 2 files changed, 91 insertions(+), 44 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index f9328127b..95d68ed9f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -7,6 +7,7 @@ options STATIC = false; // DEBUG_PARSER = true; + // DEBUG_LOOKAHEAD = true; // DEBUG_TOKEN_MANAGER = true; } @@ -45,7 +46,6 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultData import org.semanticweb.vlog4j.core.model.implementation.Expressions; - public class JavaCCParser extends JavaCCParserBase { private SubParserFactory getSubParserFactory() { @@ -68,7 +68,7 @@ TOKEN_MGR_DECLS : { Deque states; void pushState() { - states.push(curLexState); + states.push(curLexState); } void popState() { 
@@ -76,7 +76,6 @@ TOKEN_MGR_DECLS : { } } - void parse() throws PrefixDeclarationException : { } { ( base() )? @@ -304,6 +303,7 @@ Constant RDFLiteral() throws PrefixDeclarationException : { } Term ConfigurableLiteral() throws ParsingException : { + String s; Token t; } { ( LOOKAHEAD( < PIPE_DELIMITED_LITERAL >, @@ -316,24 +316,75 @@ Term ConfigurableLiteral() throws ParsingException : { t = < HASH_DELIMITED_LITERAL > { return parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, getSubParserFactory()); } - | LOOKAHEAD( < PAREN_DELIMITED_LITERAL >, - { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) - t = < PAREN_DELIMITED_LITERAL > { - return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, t.image, getSubParserFactory()); + | LOOKAHEAD( < LPAREN >, + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) + s = parenDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, s, getSubParserFactory()); } - | LOOKAHEAD( < BRACE_DELIMITED_LITERAL >, + | LOOKAHEAD( < LBRACE >, { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) - t = < BRACE_DELIMITED_LITERAL > { - return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, t.image, getSubParserFactory()); + s = braceDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, s, getSubParserFactory()); } - | LOOKAHEAD( < BRACKET_DELIMITED_LITERAL >, + | LOOKAHEAD( < LBRACKET >, { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) - t = < BRACKET_DELIMITED_LITERAL > { - return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, t.image, getSubParserFactory()); + s = bracketDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, s, getSubParserFactory()); } ) } +String parenDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LPAREN > ( s = 
parenDelimitedLiteralBody() { sb.append(s); } )* < RPAREN > { + return sb.toString(); + } +} + +String parenDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNPAREN > { return t.image; } + | ( < LPAREN > s = parenDelimitedLiteralBody() < RPAREN > ) { return "(" + s + ")"; } +} + +String braceDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LBRACE > ( s = braceDelimitedLiteralBody() { sb.append(s); } )* < RBRACE > { + return sb.toString(); + } +} + +String braceDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNBRACE > { return t.image; } + | ( < LBRACE > s = braceDelimitedLiteralBody() < RBRACE > ) { return "{" + s + "}"; } +} + +String bracketDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LBRACKET > ( s = bracketDelimitedLiteralBody() { sb.append(s); } )* < RBRACKET > { + return sb.toString(); + } +} + +String bracketDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNBRACKET > { return t.image; } + | ( < LBRACKET > s = bracketDelimitedLiteralBody() < RBRACKET > ) { return "[" + s + "]"; } +} + String String() : { Token t; } { @@ -402,17 +453,19 @@ MORE : { | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? 
> | < COMMA : "," > - | < RPAREN : ")" > { popState(); } } -< DEFAULT, BODY , DIRECTIVE_ARGUMENTS > TOKEN : { +< DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS, PAREN_DELIMITED > TOKEN : { < LPAREN : "(" > { pushState(); - if (curLexState == DEFAULT || curLexState == BODY) { + if (curLexState == TERM) { + SwitchTo(PAREN_DELIMITED); + } else if (curLexState == DEFAULT || curLexState == BODY) { SwitchTo(TERM); } } + | < RPAREN : ")" > { popState(); } } < TERM, DIRECTIVE_ARGUMENTS > TOKEN : { @@ -479,9 +532,14 @@ TOKEN : { | < "\"\"\"" > { pushState(); } : SIXFOLD_QUOTED | < "|" > { pushState(); } : PIPE_DELIMITED | < "#" > { pushState(); } : HASH_DELIMITED - | < < LPAREN > > { pushState(); } : PAREN_DELIMITED - | < "{" > { pushState(); } : BRACE_DELIMITED - | < "[" > { pushState(); } : BRACKET_DELIMITED +} + +< TERM, BRACE_DELIMITED > TOKEN : { + < LBRACE : "{" > { pushState(); } : BRACE_DELIMITED +} + +< TERM, BRACKET_DELIMITED > TOKEN : { + < LBRACKET : "[" > { pushState(); } : BRACKET_DELIMITED } < PIPE_DELIMITED > TOKEN : { @@ -499,27 +557,17 @@ TOKEN : { } < PAREN_DELIMITED > TOKEN : { - < PAREN_DELIMITED_LITERAL : ( < UNPAREN > ( "(" < UNPAREN > ")" )* )* < UNPAREN > ")" > { - popState(); - matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); - } - | < #UNPAREN : ( ~ [ "(", ")" ] )* > + < UNPAREN : ( ~ [ "(", ")" ] )+ > } < BRACE_DELIMITED > TOKEN : { - < BRACE_DELIMITED_LITERAL : ( < UNBRACE > ( "{" < UNBRACE > "}" )* )* < UNBRACE > "}" > { - popState(); - matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); - } - | < #UNBRACE : (~ [ "{", "}" ] )* > + < RBRACE : "}" > { popState(); } + | < UNBRACE : (~ [ "{", "}" ] )+ > } < BRACKET_DELIMITED > TOKEN : { - < BRACKET_DELIMITED_LITERAL : ( < UNBRACKET > ( "[" < UNBRACKET > "]" )* )* < UNBRACKET > "]" > { - popState(); - matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); - } - | < #UNBRACKET : ( ~ [ "[", "]" ] )* > + < RBRACKET : "]" > { 
popState(); } + | < UNBRACKET : ( ~ [ "[", "]" ] )+ > } < SINGLE_QUOTED > TOKEN : { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index 65490a2ae..51d9173f7 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -29,7 +29,6 @@ import java.util.stream.Collectors; import org.junit.Test; -import org.junit.Ignore; import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Literal; @@ -146,13 +145,13 @@ public void testNonTrivialCustomPipeLiteral() throws ParsingException { @Test public void testNestedParenLiteral() throws ParsingException { - String label = "this is a test, do not worry."; - String input = "p(((" + label + ")))"; + String label = "(((this is a test, do not worry.)))"; + String input = "p((" + label + "))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); Literal result = RuleParser.parseLiteral(input, parserConfiguration); - assertEquals(makeReversedConstant("(" + label + ")"), result.getConstants().toArray()[0]); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test @@ -183,13 +182,13 @@ public void testMultipleNestedParenLiterals() throws ParsingException { @Test public void testNestedBraceLiteral() throws ParsingException { - String label = "this is a test, do not worry."; - String input = "p({{" + label + "}})"; + String label = "{{{this is a test, do not worry.}}}"; + String input = "p({" + label + "})"; 
ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); Literal result = RuleParser.parseLiteral(input, parserConfiguration); - assertEquals(makeReversedConstant("{" + label + "}"), result.getConstants().toArray()[0]); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test @@ -220,13 +219,13 @@ public void testMultipleNestedBraceLiterals() throws ParsingException { @Test public void testNestedBracketLiteral() throws ParsingException { - String label = "this is a test, do not worry."; - String input = "p([[" + label + "]])"; + String label = "[[[this is a test, do not worry.]]]"; + String input = "p([" + label + "])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); Literal result = RuleParser.parseLiteral(input, parserConfiguration); - assertEquals(makeReversedConstant("[" + label + "]"), result.getConstants().toArray()[0]); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test From 912a68e1f828219e4cf8412780d13817d2759d73 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 30 Jan 2020 19:05:45 +0100 Subject: [PATCH 0777/1255] Parser: Extend & slightly refactor tests --- .../RuleParserConfigurableLiteralTest.java | 91 +++++++++---------- .../parser/RuleParserDataSourceTest.java | 2 +- .../vlog4j/syntax/parser/RuleParserTest.java | 6 +- 3 files changed, 49 insertions(+), 50 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index 
51d9173f7..09770733d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -59,12 +59,12 @@ public class RuleParserConfigurableLiteralTest { ConfigurableLiteralDelimiter.BRACKET, bracketConstant); @Test(expected = ParsingException.class) - public void testNoDefaultPipeLiteral() throws ParsingException { + public void parseLiteral_unregisteredCustomLiteral_throws() throws ParsingException { RuleParser.parseLiteral("p(|test|)"); } @Test - public void testCustomLiteralRegistration() throws ParsingException { + public void registerLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); assertTrue("Configurable Literal Handler has been registered", @@ -72,14 +72,14 @@ public void testCustomLiteralRegistration() throws ParsingException { } @Test(expected = IllegalArgumentException.class) - public void testNoDuplicateCustomLiteralRegistration() throws ParsingException, IllegalArgumentException { + public void registerLiteral_duplicateHandler_throws() throws ParsingException, IllegalArgumentException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.PIPE, hashHandler); } @Test - public void testCustomPipeLiteral() throws ParsingException { + public void parseLiteral_customPipeLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); Literal result = RuleParser.parseLiteral("p(|test|)", parserConfiguration); @@ -87,7 +87,7 @@ public void testCustomPipeLiteral() throws 
ParsingException { } @Test - public void testCustomHashLiteral() throws ParsingException { + public void parseLiteral_customHashLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler); Literal result = RuleParser.parseLiteral("p(#test#)", parserConfiguration); @@ -95,7 +95,7 @@ public void testCustomHashLiteral() throws ParsingException { } @Test - public void testCustomParenLiteral() throws ParsingException { + public void parseLiteral_customParenLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler); Literal result = RuleParser.parseLiteral("p((test))", parserConfiguration); @@ -103,7 +103,7 @@ public void testCustomParenLiteral() throws ParsingException { } @Test - public void testCustomBraceLiteral() throws ParsingException { + public void parseLiteral_customBraceLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); Literal result = RuleParser.parseLiteral("p({test})", parserConfiguration); @@ -111,7 +111,7 @@ public void testCustomBraceLiteral() throws ParsingException { } @Test - public void testCustomBracketLiteral() throws ParsingException { + public void parseLiteral_customBracketLiteral_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); @@ -119,7 +119,7 @@ public void testCustomBracketLiteral() throws ParsingException { } @Test - public void testMixedCustomLiterals() throws ParsingException { + public void 
parseLiteral_mixedLiterals_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) @@ -133,33 +133,30 @@ public void testMixedCustomLiterals() throws ParsingException { } @Test - public void testNonTrivialCustomPipeLiteral() throws ParsingException { + public void parseLiteral_nontrivialPipeLiteral_succeeds() throws ParsingException { String label = "this is a test, do not worry."; String input = "p(|" + label + "|)"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testNestedParenLiteral() throws ParsingException { + public void parseLiteral_nestedParenLiterals_succeeds() throws ParsingException { String label = "(((this is a test, do not worry.)))"; String input = "p((" + label + "))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testMultipleParenLiterals() throws ParsingException { + public void parseLiteral_multipleParenLiterals_succeeds() throws ParsingException 
{ String input = "p((test), (tset))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( @@ -168,35 +165,40 @@ public void testMultipleParenLiterals() throws ParsingException { } @Test - public void testMultipleNestedParenLiterals() throws ParsingException { - String input = "p(((test)), ((tset)))"; + public void parseLiteral_multipleNestedParenLiterals_succeeds() throws ParsingException { + String input = "p(((test)), ((tset), (tst)))"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( - Arrays.asList(makeReversedConstant("(test)"), makeReversedConstant("(tset)"))); + Arrays.asList(makeReversedConstant("(test)"), makeReversedConstant("(tset), (tst)"))); assertEquals(expected, constants); } + @Test(expected = ParsingException.class) + public void parseLiteral_mismatchedNestedParenLiteral_throws() throws ParsingException { + String input = "p((test ())"; + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); + RuleParser.parseLiteral(input, 
parserConfiguration); + } + @Test - public void testNestedBraceLiteral() throws ParsingException { + public void parseLiteral_nestedBraceLiteral_succeeds() throws ParsingException { String label = "{{{this is a test, do not worry.}}}"; String input = "p({" + label + "})"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testMultipleBraceLiterals() throws ParsingException { + public void parseLiteral_multipleBraceLiterals_succeeds() throws ParsingException { String input = "p({test}, {tset})"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( @@ -205,11 +207,10 @@ public void testMultipleBraceLiterals() throws ParsingException { } @Test - public void testMultipleNestedBraceLiterals() throws ParsingException { + public void parseLiteral_multipleNestedBraceLiterals_succeeds() throws ParsingException { String input = "p({{test}}, {{tset}})"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, - (String syntacticForm, SubParserFactory subParserFactory) -> 
makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( @@ -218,22 +219,20 @@ public void testMultipleNestedBraceLiterals() throws ParsingException { } @Test - public void testNestedBracketLiteral() throws ParsingException { + public void parseLiteral_nestedBracketLiteral_succeeds() throws ParsingException { String label = "[[[this is a test, do not worry.]]]"; String input = "p([" + label + "])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); } @Test - public void testMultipleBracketLiterals() throws ParsingException { + public void parseLiteral_multipleBracketLiterals_succeeds() throws ParsingException { String input = "p([test], [tset])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( @@ -242,22 +241,19 @@ public void testMultipleBracketLiterals() throws ParsingException { } @Test - public void 
testMultipleNestedBracketLiterals() throws ParsingException { - String input = "p([[test]], [[tset]])"; - - + public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws ParsingException { + String input = "p([[test]], [[tset], [tst]])"; ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm)); + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( - Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset]"))); + Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset], [tst]"))); assertEquals(expected, constants); } @Test - public void testMixedAndNestedCustomLiterals() throws ParsingException { + public void parseLiteral_mixedAndNestedLiterals_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) @@ -276,6 +272,9 @@ static Constant makeReversedConstant(String name) { return Expressions.makeAbstractConstant(builder.reverse().toString()); } + static ConfigurableLiteralHandler reversingHandler = + (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm); + static ConfigurableLiteralHandler getMockLiteralHandler(ConfigurableLiteralDelimiter delimiter, Constant constant) { ConfigurableLiteralHandler handler = mock(ConfigurableLiteralHandler.class); try { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 5a357f407..42a835558 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -190,7 +190,7 @@ public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws } @Test - public void parseDataSourceDeclaration_windowsStylePathName_success() throws ParsingException, IOException { + public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws ParsingException, IOException { RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 8a75123bd..061fcc00f 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -238,7 +238,7 @@ public void testIncompleteStringLiteral() throws ParsingException { } @Test - public void parseLiteral_escapeSequences_success() throws ParsingException { + public void parseLiteral_escapeSequences_succeeds() throws ParsingException { String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); @@ -253,7 +253,7 @@ public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { } @Test - public void parseLiteral_allEscapeSequences_success() throws ParsingException { + public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = 
"p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", @@ -281,7 +281,7 @@ public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingE } @Test - public void parseLiteral_multiLineLiteral_success() throws ParsingException { + public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); From 0850a674de6875f45c0be99532cb935cb9900e79 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 31 Jan 2020 16:18:21 +0100 Subject: [PATCH 0778/1255] Parser: Generalise Configurable Literals to return Terms --- .../semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java | 4 ++-- .../org/semanticweb/vlog4j/parser/ParserConfiguration.java | 3 ++- .../java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java index a98bfec68..bd5b14c24 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -40,5 +40,5 @@ public interface ConfigurableLiteralHandler { * @throws ParsingException when the given syntactic form is invalid. * @return an appropriate @{link Constant} instance. 
*/ - public Constant parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; + public Term parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException; } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 096a1a06c..e9536d756 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -151,7 +152,7 @@ public boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter deli * the given syntactic form is invalid. * @return an appropriate {@link Constant} instance. 
*/ - public Constant parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, + public Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException { if (!isConfigurableLiteralRegistered(delimiter)) { throw new ParsingException( diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 95d68ed9f..9ecf7ded8 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -501,7 +501,7 @@ TOKEN : { < BASE : "base" > : DIRECTIVE_ARGUMENTS | < PREFIX : "prefix" > : DIRECTIVE_ARGUMENTS | < SOURCE : "source" > : DIRECTIVE_ARGUMENTS - | < CUSTOM : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS + | < CUSTOM_DIRECTIVE : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS | < DIRECTIVENAME : [ "a"-"z", "A"-"Z" ] ([ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ])* > } From bd32a10690787acc8b6f2e777f973359ccfd7039 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Sun, 2 Feb 2020 14:55:52 +0100 Subject: [PATCH 0779/1255] Parser: Make handling of arguments in directives type-safe --- .../parser/DataSourceDeclarationHandler.java | 4 +- .../vlog4j/parser/DirectiveArgument.java | 208 ++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 8 +- .../CsvFileDataSourceDeclarationHandler.java | 14 +- .../RdfFileDataSourceDeclarationHandler.java | 14 +- ...eryResultDataSourceDeclarationHandler.java | 46 ++-- .../vlog4j/parser/javacc/JavaCCParser.jj | 31 ++- .../parser/javacc/JavaCCParserBase.java | 3 +- .../parser/RuleParserDataSourceTest.java | 6 +- 9 files changed, 286 insertions(+), 48 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 180b93053..7a519414e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -47,7 +47,7 @@ public interface DataSourceDeclarationHandler { * Source, or the number of arguments is invalid. * @return a {@link DataSource} instance corresponding to the given arguments. */ - DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -59,7 +59,7 @@ DataSource handleDeclaration(List arguments, final SubParserFactory subP * @throws ParsingException when the given number of Arguments is invalid for * the Data Source. 
*/ - static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { + static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException("Invalid number of arguments " + arguments.size() + " for Data Source declaration, expected " + number); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java new file mode 100644 index 000000000..d7fc50b95 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java @@ -0,0 +1,208 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URL; +import java.util.Optional; +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.api.Term; + +/** + * A tagged union representing the possible types allowed to appear as arguments + * in directives. + * + * @author Maximilian Marx + */ +public abstract class DirectiveArgument { + private DirectiveArgument() { + } + + /** + * Apply a function to the contained value. 
+ * + * @argument stringHandler the function to apply to a string argument + * @argument iriHandler the function to apply to an IRI + * @argument termHandler the function to apply to a Term + * + * @return the value returned by the appropriate handler function + */ + public abstract V apply(Function stringHandler, + Function iriHandler, Function termHandler); + + /** + * Partially compare two arguments, without comparing the actual values. + * + * @argument other the Object to compare to. + * + * @return An {@link Optional} containing true if the arguments are surely + * equal, containing false if the arguments are not equal, or an empty + * Optional if the values of the arguments need to be compared. + * + */ + protected Optional isEqual(Object other) { + if (other == null) { + return Optional.of(false); + } + + if (other == this) { + return Optional.of(true); + } + + if (!(other instanceof DirectiveArgument)) { + return Optional.of(false); + } + + return Optional.empty(); + } + + /** + * Create an argument containing a String. + * + * @argument value the string value + * + * @return An argument containing the given string value + */ + public static DirectiveArgument string(String value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler) { + return stringHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> str.equals(value), iri -> false, term -> false); + } + + @Override + public int hashCode() { + return 41 * value.hashCode(); + } + }; + } + + /** + * Create an argument containing a IRI. 
+ * + * @argument value the IRI value + * + * @return An argument containing the given IRI value + */ + public static DirectiveArgument iri(URL value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler) { + return iriHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> iri.equals(value), term -> false); + } + + @Override + public int hashCode() { + return 43 * value.hashCode(); + } + }; + } + + /** + * Create an argument containing a Term. + * + * @argument value the Term value + * + * @return An argument containing the given Term value + */ + public static DirectiveArgument term(Term value) { + return new DirectiveArgument() { + @Override + public V apply(Function stringHandler, + Function iriHandler, Function termHandler) { + return termHandler.apply(value); + } + + @Override + public boolean equals(Object other) { + Optional maybeEquals = isEqual(other); + + if (maybeEquals.isPresent()) { + return maybeEquals.get(); + } + + DirectiveArgument otherArgument = (DirectiveArgument) other; + return otherArgument.apply(str -> false, iri -> false, term -> term.equals(value)); + } + + @Override + public int hashCode() { + return 47 * value.hashCode(); + } + }; + } + + /** + * Create an optional from a (possible) string value. + * + * @return An optional containing the contained string, or an empty Optional if + * the argument doesn't contain a string. + */ + public Optional fromString() { + return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) IRI value. 
+ * + * @return An optional containing the contained IRI, or an empty Optional if the + * argument doesn't contain a IRI. + */ + public Optional fromIri() { + return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); + } + + /** + * Create an optional from a (possible) Term value. + * + * @return An optional containing the contained Term, or an empty Optional if + * the argument doesn't contain a Term. + */ + public Optional fromTerm() { + return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index e9536d756..204acafe8 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -95,8 +95,8 @@ public ParserConfiguration registerDataSource(final String name, final DataSourc * * @return the Data Source instance. */ - public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, final List args, - final SubParserFactory subParserFactory) throws ParsingException { + public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final String name, + final List args, final SubParserFactory subParserFactory) throws ParsingException { final DataSourceDeclarationHandler handler = this.dataSources.get(name); if (handler == null) { @@ -113,8 +113,8 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin * @param languageTag the language tag, or null if not present. * @param the datatype, or null if not present. * - * @throws ParsingException when the lexical form is invalid for the - * given data type. + * @throws ParsingException when the lexical form is invalid for the given data + * type. 
* @return the {@link Constant} corresponding to the given arguments. */ public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 8b7db9640..afe42c2cf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -23,10 +23,12 @@ import java.io.File; import java.io.IOException; import java.util.List; +import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -37,12 +39,18 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); - String fileName = arguments.get(0); - File file = new File(fileName); + DirectiveArgument fileNameArgument = arguments.get(0); + String fileName; + try { + fileName = fileNameArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); + } + File file = new File(fileName); try { return 
new CsvFileDataSource(file); } catch (IOException e) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index a17145e19..e54fc66c7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -23,10 +23,12 @@ import java.io.File; import java.io.IOException; import java.util.List; +import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -37,12 +39,18 @@ */ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); - String fileName = arguments.get(0); - File file = new File(fileName); + DirectiveArgument fileNameArgument = arguments.get(0); + String fileName; + try { + fileName = fileNameArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); + } + File file = new File(fileName); try { return new RdfFileDataSource(file); } catch (IOException e) { diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index b49683115..9f313ddd1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -4,14 +4,14 @@ * #%L * VLog4j Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 VLog4j Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,17 +20,15 @@ * #L% */ -import java.net.MalformedURLException; import java.net.URL; import java.util.List; +import java.util.NoSuchElementException; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; -import org.semanticweb.vlog4j.parser.javacc.ParseException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -40,29 +38,35 @@ */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); - String endpoint = arguments.get(0); - JavaCCParser parser = subParserFactory.makeSubParser(endpoint); - String parsedEndpoint; + DirectiveArgument endpointArgument = arguments.get(0); + URL endpoint; try { - parsedEndpoint = parser.absoluteIri(); - } catch (ParseException | PrefixDeclarationException e) { - throw new ParsingException("Error while parsing endpoint IRI in SPARQL query data source: " + e.getMessage(), e); + endpoint = endpointArgument.fromIri().get(); + } catch (NoSuchElementException e) { + throw new ParsingException( + "SPARQL endpoint \"" + endpointArgument + "\" is not a valid IRI: " + e.getMessage(), e); } - URL endpointUrl; + DirectiveArgument variablesArgument = arguments.get(1); + String variables; try { - endpointUrl = new URL(parsedEndpoint); - } catch (MalformedURLException e) { - throw new 
ParsingException("SPARQL endpoint \"" + endpoint + "\" is not a valid URL: " + e.getMessage(), e); + variables = variablesArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("Variables list \"" + variablesArgument + "\" is not a string.", e); } - String variables = arguments.get(1); - String query = arguments.get(2); + DirectiveArgument queryArgument = arguments.get(2); + String query; + try { + query = queryArgument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException("Query fragment \"" + queryArgument + "\" is not a string.", e); + } - return new SparqlQueryResultDataSource(endpointUrl, variables, query); + return new SparqlQueryResultDataSource(endpoint, variables, query); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 9ecf7ded8..1176f9c7a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -27,6 +27,7 @@ import java.util.ArrayDeque; import java.util.LinkedList; import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -109,12 +110,6 @@ String absoluteIri() throws PrefixDeclarationException : { | iri = < PNAME_LN > { return resolvePrefixedName(iri.image); } } -String quotedIri() throws PrefixDeclarationException : { - String iri; -} { - iri = absoluteIri() { return "<" + iri + ">"; } -} - void source() throws PrefixDeclarationException : { String predicateName; DataSource dataSource; @@ -132,7 +127,7 @@ void source() throws PrefixDeclarationException : { DataSource dataSource() throws PrefixDeclarationException : { Token 
sourceName; - List< String > arguments; + List< DirectiveArgument > arguments; } { ( sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { @@ -395,13 +390,25 @@ String String() : { ) { return unescapeStr(t.image, t.beginLine, t.beginColumn); } } -LinkedList< String > Arguments() throws PrefixDeclarationException : { +LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : { + DirectiveArgument argument; String str; - LinkedList< String > rest = new LinkedList< String >(); + Term t; + LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); } { - ( str = String() - | str = quotedIri()) [< COMMA > rest = Arguments()] { - rest.addFirst(str); + ( str = String() { argument = DirectiveArgument.string(str); } + | str = absoluteIri() { + URL url; + try { + url = new URL(str); + } catch (MalformedURLException e) { + throw makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); + } + argument = DirectiveArgument.iri(url); + } + | t = term(FormulaContext.HEAD) { argument = DirectiveArgument.term(t); } + ) [< COMMA > rest = Arguments()] { + rest.addFirst(argument); return rest; } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 273c9f403..54c4b7212 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -35,6 +35,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; import 
org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -312,7 +313,7 @@ PrefixDeclarations getPrefixDeclarations() { } DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, - List arguments, SubParserFactory subParserFactory) throws ParseException { + List arguments, SubParserFactory subParserFactory) throws ParseException { try { return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(syntacticForm, arguments, subParserFactory); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 42a835558..ac55a714e 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -43,6 +43,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -141,11 +142,12 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), + doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), ArgumentMatchers.any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; - 
List expectedArguments = Arrays.asList("hello", "world"); + List expectedArguments = Arrays.asList(DirectiveArgument.string("hello"), + DirectiveArgument.string("world")); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); From 548b38e32b7bc37009a7b6d86cf4ed7f9b6eceaf Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 3 Feb 2020 13:06:34 +0100 Subject: [PATCH 0780/1255] Parser: Generalise handling of directives from DataSources --- .../parser/DataSourceDeclarationHandler.java | 38 +---------- .../vlog4j/parser/DirectiveHandler.java | 66 +++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 2 +- .../CsvFileDataSourceDeclarationHandler.java | 5 +- .../RdfFileDataSourceDeclarationHandler.java | 5 +- ...eryResultDataSourceDeclarationHandler.java | 9 +-- .../parser/RuleParserDataSourceTest.java | 4 +- 7 files changed, 82 insertions(+), 47 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 7a519414e..4ec871f68 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -20,11 +20,9 @@ * #L% */ -import java.util.List; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** * Handler for parsing a custom Data Source declaration. 
@@ -32,37 +30,5 @@ * @author Maximilian Marx */ @FunctionalInterface -public interface DataSourceDeclarationHandler { - /** - * Parse a Data Source Declaration. - * - * This is called by the parser to instantiate the {@link DataSource} component - * of a {@link DataSourceDeclaration}. - * - * @param arguments Arguments given to the Data Source declaration. - * @param subParserFactory a factory for obtaining a SubParser, sharing the - * parser's state, but bound to new input. - * - * @throws ParsingException when any of the arguments is invalid for the Data - * Source, or the number of arguments is invalid. - * @return a {@link DataSource} instance corresponding to the given arguments. - */ - DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) - throws ParsingException; - - /** - * Validate the provided number of arguments to the data source. - * - * @param arguments Arguments given to the Data Source declaration. - * @param number expected number of arguments - * - * @throws ParsingException when the given number of Arguments is invalid for - * the Data Source. 
- */ - static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { - if (arguments.size() != number) { - throw new ParsingException("Invalid number of arguments " + arguments.size() - + " for Data Source declaration, expected " + number); - } - } +public interface DataSourceDeclarationHandler extends DirectiveHandler { } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java new file mode 100644 index 000000000..2883225be --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -0,0 +1,66 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing a custom directive. + * + * @author Maximilian Marx + */ +@FunctionalInterface +public interface DirectiveHandler { + /** + * Parse a Directive. + * + * This is called by the parser to parse directives. + * + * @param arguments Arguments given to the Directive statement. + * @param subParserFactory a factory for obtaining a SubParser, sharing the + * parser's state, but bound to new input. 
+ * + * @throws ParsingException when any of the arguments is invalid for the + * directive, or the number of arguments is invalid. + * @return a {@code T} instance corresponding to the given arguments. + */ + T handleDirective(List arguments, final SubParserFactory subParserFactory) + throws ParsingException; + + /** + * Validate the provided number of arguments to the data source. + * + * @param arguments Arguments given to the Directive statement. + * @param number expected number of arguments + * + * @throws ParsingException when the given number of Arguments is invalid for + * the Directive statement. + */ + static void validateNumberOfArguments(final List arguments, final int number) + throws ParsingException { + if (arguments.size() != number) { + throw new ParsingException( + "Invalid number of arguments " + arguments.size() + " for Directive statement, expected " + number); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 204acafe8..6ad757659 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -103,7 +103,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin throw new ParsingException("Data source \"" + name + "\" is not known."); } - return handler.handleDeclaration(args, subParserFactory); + return handler.handleDirective(args, subParserFactory); } /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index afe42c2cf..edd66a12f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -39,9 +40,9 @@ */ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); + DirectiveHandler.validateNumberOfArguments(arguments, 1); DirectiveArgument fileNameArgument = arguments.get(0); String fileName; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index e54fc66c7..32a5f6b23 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -29,6 +29,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -39,9 +40,9 @@ */ public class 
RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 1); + DirectiveHandler.validateNumberOfArguments(arguments, 1); DirectiveArgument fileNameArgument = arguments.get(0); String fileName; try { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 9f313ddd1..cda6f38b9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -28,6 +28,7 @@ import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -38,9 +39,9 @@ */ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler { @Override - public DataSource handleDeclaration(List arguments, final SubParserFactory subParserFactory) + public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { - DataSourceDeclarationHandler.validateNumberOfArguments(arguments, 3); + DirectiveHandler.validateNumberOfArguments(arguments, 3); DirectiveArgument endpointArgument = arguments.get(0); URL endpoint; diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index ac55a714e..4d045d292 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -142,7 +142,7 @@ public void testCustomDataSource() throws ParsingException { DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDataSource("mock-source", handler); - doReturn(source).when(handler).handleDeclaration(ArgumentMatchers.>any(), + doReturn(source).when(handler).handleDirective(ArgumentMatchers.>any(), ArgumentMatchers.any()); String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; @@ -150,7 +150,7 @@ public void testCustomDataSource() throws 
ParsingException { DirectiveArgument.string("world")); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); - verify(handler).handleDeclaration(eq(expectedArguments), ArgumentMatchers.any()); + verify(handler).handleDirective(eq(expectedArguments), ArgumentMatchers.any()); } @Test From c273806cd99c2a49662c7701b0246d4f36db05fb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 3 Feb 2020 16:21:43 +0100 Subject: [PATCH 0781/1255] Parser: Support parsing of Named Nulls in facts --- .../core/model/implementation/Serializer.java | 6 +- .../vlog4j/core/model/TermImplTest.java | 6 +- .../vlog4j/parser/ParserConfiguration.java | 55 +++++++++++++++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 9 ++- .../parser/javacc/JavaCCParserBase.java | 10 ++++ .../parser/RuleParserParseFactTest.java | 37 ++++++++++--- 6 files changed, 106 insertions(+), 17 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index a9f7006f4..b9f3c2b51 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -57,7 +57,7 @@ public final class Serializer { public static final String NEGATIVE_IDENTIFIER = "~"; public static final String EXISTENTIAL_IDENTIFIER = "!"; public static final String UNIVERSAL_IDENTIFIER = "?"; - public static final String NAMEDNULL_IDENTIFIER = "_"; + public static final String NAMEDNULL_IDENTIFIER = "_:"; public static final String OPENING_PARENTHESIS = "("; public static final String CLOSING_PARENTHESIS = ")"; public static final String OPENING_BRACKET = "["; @@ -342,7 +342,7 @@ private static String getIRIString(final String string) { *
            * Example for {@code string = "\\a"}, the returned value is * {@code string = "\"\\\\a\""} - * + * * @param string * @return an escaped string surrounded by {@code "}. */ @@ -362,7 +362,7 @@ public static String getString(final String string) { *
          • {@code \r}
          • *
          • {@code \f}
          • *
              - * + * * @param string * @return an escaped string */ diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index 81bf20c96..aed7efd89 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -167,7 +167,7 @@ public void existentialVariableToStringTest() { @Test public void namedNullToStringTest() { NamedNullImpl n = new NamedNullImpl("123"); - assertEquals("_123", n.toString()); + assertEquals("_:123", n.toString()); } @Test(expected = NullPointerException.class) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 6ad757659..6e97f8245 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -39,6 +39,11 @@ * @author Maximilian Marx */ public class ParserConfiguration { + /** + * Whether to allow parsing Named Nulls. + */ + private boolean allowNamedNulls = false; + /** * The registered data sources. */ @@ -183,6 +188,17 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon return this; } + /** + * Register a custom literal handler. 
+ * + * @argument delimiter the delimiter to handle. + * @argument handler the handler for this literal type. + * + * @throws IllegalArgumentException when the literal delimiter has + * already been registered. + * + * @return this + */ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimiter, ConfigurableLiteralHandler handler) throws IllegalArgumentException { if (literals.containsKey(delimiter)) { @@ -192,4 +208,43 @@ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimite this.literals.put(delimiter, handler); return this; } + + /** + * Set whether to allow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * + * @argument allow true allows parsing of named nulls. + * + * @return this + */ + public ParserConfiguration setNamedNulls(boolean allow) { + this.allowNamedNulls = allow; + return this; + } + + /** + * Allow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * + * @return this + */ + public ParserConfiguration allowNamedNulls() { + return this.setNamedNulls(true); + } + + /** + * Disallow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * + * @return this + */ + public ParserConfiguration disallowNamedNulls() { + return this.setNamedNulls(false); + } + + /** + * Whether parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull} is allowed. 
+ * + * @return this + */ + public boolean isParsingOfNamedNullsAllowed() { + return this.allowNamedNulls; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 1176f9c7a..631e5c422 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -251,6 +251,8 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { } { s = absoluteIri() { return createConstant(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } + | LOOKAHEAD( < NAMED_NULL >, { isParsingOfNamedNullsAllowed() }) + t = < NAMED_NULL > { return createNamedNull(t.image); } | c = NumericLiteral() { return c; } | c = RDFLiteral() { return c; } | t = < UNIVAR > { @@ -396,8 +398,8 @@ LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : Term t; LinkedList< DirectiveArgument > rest = new LinkedList< DirectiveArgument >(); } { - ( str = String() { argument = DirectiveArgument.string(str); } - | str = absoluteIri() { + ( LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } + | LOOKAHEAD(absoluteIri()) str = absoluteIri() { URL url; try { url = new URL(str); @@ -460,6 +462,9 @@ MORE : { | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? > | < COMMA : "," > + | < NAMED_NULL : "_:" ( < PN_CHARS_U > | [ "0"-"9" ] ) (( < PN_CHARS > | "." )* < PN_CHARS > )? 
> { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 2); + } } < DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS, PAREN_DELIMITED > TOKEN : { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 54c4b7212..008314039 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -27,12 +27,14 @@ import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; @@ -142,6 +144,10 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { return Expressions.makeAbstractConstant(absoluteIri); } + NamedNull createNamedNull(String lexicalForm) { + return new NamedNullImpl(lexicalForm); + } + /** * Creates a suitable {@link Constant} from the parsed data. 
* @@ -332,6 +338,10 @@ boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) return parserConfiguration.isConfigurableLiteralRegistered(delimiter); } + boolean isParsingOfNamedNullsAllowed() { + return parserConfiguration.isParsingOfNamedNullsAllowed(); + } + void setBase(String baseIri) throws PrefixDeclarationException { prefixDeclarations.setBase(baseIri); } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java index 876b01ae8..0d0bd03be 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java @@ -25,8 +25,11 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; @@ -34,35 +37,51 @@ public class RuleParserParseFactTest { private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarations.XSD_STRING); private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarations.XSD_STRING); + private final NamedNull null1 = new NamedNullImpl("1"); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); + private final Fact fact1 = Expressions.makeFact("p", null1); @Test - public void testFactArityOne() throws ParsingException { + public void parseFact_string_succeeds() 
throws ParsingException { assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); } @Test - public void testFactArityOneWithDataType() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); - } - - @Test - public void testFactArityTwo() throws ParsingException { + public void parseFact_twoStrings_succeeds() throws ParsingException { assertEquals(RuleParser.parseFact("p(\"a\",\"b\") ."), factAB); } @Test(expected = ParsingException.class) - public void testFactWithVariable() throws ParsingException { + public void parseFact_nonGroundFact_throws() throws ParsingException { String input = "p(?X) ."; RuleParser.parseFact(input); } @Test(expected = ParsingException.class) - public void testZeroArityFact() throws ParsingException { + public void parseFact_arityZeroFact_throws() throws ParsingException { String input = "p() ."; RuleParser.parseFact(input); } + @Test(expected = ParsingException.class) + public void parseFact_namedNull_throws() throws ParsingException { + String input = "p(_:1) ."; + RuleParser.parseFact(input); + } + + @Test + public void parseFact_namedNullAllowed_succeeds() throws ParsingException { + String input = "p(_:1) ."; + ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); + assertEquals(RuleParser.parseFact(input, parserConfiguration), fact1); + } + + @Test(expected = ParsingException.class) + public void parseFact_namedNullAsPredicateName_throws() throws ParsingException { + String input = "_:p(\"a\") ."; + ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); + RuleParser.parseFact(input, parserConfiguration); + } } From b7271a58269049a9eb480ceb2940bc9738aadb77 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 3 Feb 2020 18:27:36 +0100 Subject: [PATCH 0782/1255] Parser: Add support for custom directives --- .../vlog4j/parser/DirectiveHandler.java | 19 ++++- .../vlog4j/parser/ParserConfiguration.java | 74 +++++++++++++++++-- 
.../vlog4j/parser/javacc/JavaCCParser.jj | 29 ++++++-- .../parser/javacc/JavaCCParserBase.java | 10 ++- 4 files changed, 113 insertions(+), 19 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 2883225be..54ebebb90 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -22,6 +22,8 @@ import java.util.List; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; /** @@ -44,7 +46,7 @@ public interface DirectiveHandler { * directive, or the number of arguments is invalid. * @return a {@code T} instance corresponding to the given arguments. */ - T handleDirective(List arguments, final SubParserFactory subParserFactory) + public T handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException; /** @@ -56,11 +58,24 @@ T handleDirective(List arguments, final SubParserFactory subP * @throws ParsingException when the given number of Arguments is invalid for * the Directive statement. */ - static void validateNumberOfArguments(final List arguments, final int number) + public static void validateNumberOfArguments(final List arguments, final int number) throws ParsingException { if (arguments.size() != number) { throw new ParsingException( "Invalid number of arguments " + arguments.size() + " for Directive statement, expected " + number); } } + + /** + * Obtain a {@link KnowledgeBase} from a {@link SubParserFactory}. + * + * @argument subParserFactory the SubParserFactory. + * + * @return the knowledge base. 
+ */ + default KnowledgeBase getKnowledgeBase(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getKnowledgeBase(); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 6e97f8245..5f59f75da 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -21,6 +21,7 @@ */ import java.util.HashMap; +import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.Validate; @@ -30,6 +31,7 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -40,7 +42,12 @@ */ public class ParserConfiguration { /** - * Whether to allow parsing Named Nulls. + * Reserved directive names that are not allowed to be registered. + */ + public static final List RESERVED_DIRECTIVE_NAMES = Arrays.asList("base", "prefix", "source"); + + /** + * Whether parsing Named Nulls is allowed. */ private boolean allowNamedNulls = false; @@ -59,6 +66,11 @@ public class ParserConfiguration { */ private HashMap literals = new HashMap<>(); + /** + * The registered custom directives. + */ + private HashMap> directives = new HashMap<>(); + /** * Register a new (type of) Data Source. * @@ -194,23 +206,68 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon * @argument delimiter the delimiter to handle. * @argument handler the handler for this literal type. 
* - * @throws IllegalArgumentException when the literal delimiter has - * already been registered. + * @throws IllegalArgumentException when the literal delimiter has already been + * registered. * * @return this */ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimiter, ConfigurableLiteralHandler handler) throws IllegalArgumentException { - if (literals.containsKey(delimiter)) { - throw new IllegalArgumentException("Literal delimiter \"" + delimiter + "\" is already registered."); - } + Validate.isTrue(!this.literals.containsKey(delimiter), "Literal delimiter \"%s\" is already registered.", + delimiter); this.literals.put(delimiter, handler); return this; } /** - * Set whether to allow parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull}. + * Register a directive. + * + * @argument name the name of the directive. + * @argument handler the handler for this directive. + * + * @throws IllegalArgumentException when the directive name has already been + * registered, or is a reserved name (i.e., one + * of {@code base}, {@code prefix}, and + * {@code source}). + * + * @return this + */ + public ParserConfiguration registerDirective(String name, DirectiveHandler handler) + throws IllegalArgumentException { + Validate.isTrue(!RESERVED_DIRECTIVE_NAMES.contains(name), "The name \"%s\" is a reserved directive name.", + name); + Validate.isTrue(!this.directives.containsKey(name), "The directive \"%s\" is already registered.", name); + + this.directives.put(name, handler); + return this; + } + + /** + * Parse a directive statement. + * + * @argument name the name of the directive. + * @argument arguments the arguments given in the statement. + * + * @throws ParsingException when the directive is not known, or the arguments + * are invalid for the directive. 
+ * + * @return the (possibly updated) KnowledgeBase + */ + public KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) + throws ParsingException { + final DirectiveHandler handler = this.directives.get(name); + + if (handler == null) { + throw new ParsingException("Directive \"" + name + "\" is not known."); + } + + return handler.handleDirective(arguments, subParserFactory); + } + + /** + * Set whether to allow parsing of + * {@link semanticweb.vlog4j.core.model.api.NamedNull}. * * @argument allow true allows parsing of named nulls. * @@ -240,7 +297,8 @@ public ParserConfiguration disallowNamedNulls() { } /** - * Whether parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull} is allowed. + * Whether parsing of {@link org.semanticweb.vlog4j.core.model.api.NamedNull} is + * allowed. * * @return this */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 631e5c422..7024f63c3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -41,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -126,8 +127,8 @@ void source() throws PrefixDeclarationException : { } DataSource dataSource() throws PrefixDeclarationException : { - Token sourceName; - List< DirectiveArgument > 
arguments; + Token sourceName; + List< DirectiveArgument > arguments; } { ( sourceName = < ARGUMENT_NAME > | sourceName = < VARORPREDNAME > ) < LPAREN > arguments = Arguments() < RPAREN > { @@ -135,14 +136,24 @@ DataSource dataSource() throws PrefixDeclarationException : { } } +KnowledgeBase directive() throws PrefixDeclarationException : { + Token name; + List< DirectiveArgument > arguments; +} { + name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { + return parseDirectiveStatement(name.image, arguments, getSubParserFactory()); + } +} + void statement() throws PrefixDeclarationException : { Statement statement; + KnowledgeBase knowledgeBase; resetVariableSets(); } { - ( LOOKAHEAD(rule()) statement = rule() - | statement = fact(FormulaContext.HEAD) ) { - addStatement(statement); - } + ( LOOKAHEAD(rule()) statement = rule() { addStatement(statement); } + | statement = fact(FormulaContext.HEAD) { addStatement(statement); } + | knowledgeBase = directive() { setKnowledgeBase(knowledgeBase); } + ) } Rule rule() throws PrefixDeclarationException : { @@ -274,7 +285,7 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { | try { tt = ConfigurableLiteral () { return tt; } } catch (ParsingException e) { - throw new ParseException("Invalid configurable literal expression: " + e.getMessage()); + throw makeParseExceptionWithCause("Invalid configurable literal expression", e); } } @@ -513,7 +524,9 @@ TOKEN : { < BASE : "base" > : DIRECTIVE_ARGUMENTS | < PREFIX : "prefix" > : DIRECTIVE_ARGUMENTS | < SOURCE : "source" > : DIRECTIVE_ARGUMENTS - | < CUSTOM_DIRECTIVE : < DIRECTIVENAME > > : DIRECTIVE_ARGUMENTS + | < CUSTOM_DIRECTIVE : < DIRECTIVENAME > > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 1); + }: DIRECTIVE_ARGUMENTS | < DIRECTIVENAME : [ "a"-"z", "A"-"Z" ] ([ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ])* > } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 008314039..7ebeb6e9d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -331,7 +331,15 @@ DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticFo Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, SubParserFactory subParserFactory) throws ParsingException { - return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); + return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); + } + + KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParseException { + try { + return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); + } catch (ParsingException e) { + throw makeParseExceptionWithCause("Failed while trying to parse directive statement", e); + } } boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) { From 10756350a6986d7ec56d9ae4c2e8754943d9cef0 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Thu, 6 Feb 2020 17:01:08 +0100 Subject: [PATCH 0783/1255] Parser: Support `@import` statement in rules files --- .../parser/DefaultParserConfiguration.java | 2 + .../vlog4j/parser/DirectiveHandler.java | 103 ++++++++++++++++++ .../CsvFileDataSourceDeclarationHandler.java | 12 +- .../RdfFileDataSourceDeclarationHandler.java | 12 +- ...eryResultDataSourceDeclarationHandler.java | 29 +---- .../ImportFileDirectiveHandler.java | 63 +++++++++++ .../vlog4j/syntax/parser/RuleParserTest.java | 24 ++-- vlog4j-parser/src/test/resources/facts.rls | 4 + 8 files changed, 196 insertions(+), 53 deletions(-) create mode 100644 
vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java create mode 100644 vlog4j-parser/src/test/resources/facts.rls diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java index a52f02b87..383cca87c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -24,6 +24,7 @@ import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.vlog4j.parser.directives.ImportFileDirectiveHandler; /** * Default parser configuration. Registers default data sources. 
@@ -34,6 +35,7 @@ public class DefaultParserConfiguration extends ParserConfiguration { public DefaultParserConfiguration() { super(); registerDefaultDataSources(); + registerDirective("import", new ImportFileDirectiveHandler()); } /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 54ebebb90..2c5cd6954 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -20,8 +20,13 @@ * #L% */ +import java.io.File; +import java.io.IOException; +import java.net.URL; import java.util.List; +import java.util.NoSuchElementException; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; @@ -66,6 +71,91 @@ public static void validateNumberOfArguments(final List argum } } + /** + * Validate that the provided argument is a {@link String}. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not a {@link String}. + * + * @return the contained {@link String}. + */ + public static String validateStringArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + try { + return argument.fromString().get(); + } catch (NoSuchElementException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); + } + } + + /** + * Validate that the provided argument is a file name. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. 
+ * + * @throws ParsingException when the given argument is not a valid file name. + * + * @return the File corresponding to the contained file name. + */ + public static File validateFilenameArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + String fileName = DirectiveHandler.validateStringArgument(argument, description); + File file = new File(fileName); + try { + // we don't care about the actual path, just that there is one. + file.getCanonicalPath(); + } catch (IOException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a valid file path.", e); + } + + return file; + } + + /** + * Validate that the provided argument is an IRI. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not an IRI. + * + * @return the contained IRI. + */ + public static URL validateIriArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + try { + return argument.fromIri().get(); + } catch (NoSuchElementException e) { + throw new ParsingException(description + "\"" + argument + "\" is not an IRI.", e); + } + } + + /** + * Validate that the provided argument is a {@link Term}. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not a {@link Term}. + * + * @return the contained {@link Term}. 
+ */ + public static Term validateTermArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + try { + return argument.fromTerm().get(); + } catch (NoSuchElementException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a string.", e); + } + } + /** * Obtain a {@link KnowledgeBase} from a {@link SubParserFactory}. * @@ -78,4 +168,17 @@ default KnowledgeBase getKnowledgeBase(SubParserFactory subParserFactory) { return subParser.getKnowledgeBase(); } + + /** + * Obtain a {@link ParserConfiguration} from a {@link SubParserFactory}. + * + * @argument subParserFactory the SubParserFactory. + * + * @return the parser configuration. + */ + default ParserConfiguration getParserConfiguration(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getParserConfiguration(); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java index edd66a12f..5d6fac9db 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -23,7 +23,6 @@ import java.io.File; import java.io.IOException; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; @@ -43,19 +42,12 @@ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - DirectiveArgument fileNameArgument 
= arguments.get(0); - String fileName; - try { - fileName = fileNameArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); - } + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); - File file = new File(fileName); try { return new CsvFileDataSource(file); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); } } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java index 32a5f6b23..1018a3f51 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -23,7 +23,6 @@ import java.io.File; import java.io.IOException; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -43,19 +42,12 @@ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - DirectiveArgument fileNameArgument = arguments.get(0); - String fileName; - try { - fileName = fileNameArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("File name \"" + fileNameArgument + "\" is not a string.", e); - } + File file = 
DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); - File file = new File(fileName); try { return new RdfFileDataSource(file); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); } } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index cda6f38b9..ff178435c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -22,7 +22,6 @@ import java.net.URL; import java.util.List; -import java.util.NoSuchElementException; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; @@ -42,31 +41,9 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 3); - - DirectiveArgument endpointArgument = arguments.get(0); - URL endpoint; - try { - endpoint = endpointArgument.fromIri().get(); - } catch (NoSuchElementException e) { - throw new ParsingException( - "SPARQL endpoint \"" + endpointArgument + "\" is not a valid IRI: " + e.getMessage(), e); - } - - DirectiveArgument variablesArgument = arguments.get(1); - String variables; - try { - variables = variablesArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("Variables list \"" + 
variablesArgument + "\" is not a string.", e); - } - - DirectiveArgument queryArgument = arguments.get(2); - String query; - try { - query = queryArgument.fromString().get(); - } catch (NoSuchElementException e) { - throw new ParsingException("Query fragment \"" + queryArgument + "\" is not a string.", e); - } + URL endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); + String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); + String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); return new SparqlQueryResultDataSource(endpoint, variables, query); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java new file mode 100644 index 000000000..4334cfb01 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -0,0 +1,63 @@ +package org.semanticweb.vlog4j.parser.directives; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.List; + +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@code @import} statements. + * + * @author Maximilian Marx + */ +public class ImportFileDirectiveHandler implements DirectiveHandler { + @Override + public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DirectiveHandler.validateNumberOfArguments(arguments, 1); + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); + FileInputStream stream; + + // @todo(mx): should we try to prevent cyclic imports? 
+ try { + stream = new FileInputStream(file); + } catch (IOException e) { + throw new ParsingException("Failed to read rules from \"" + file.getName() + "\"", e); + } + + KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + + RuleParser.parseInto(knowledgeBase, stream, parserConfiguration); + + return knowledgeBase; + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 061fcc00f..62e87cdc5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -25,6 +25,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import org.junit.Test; import org.mockito.ArgumentMatchers; @@ -40,6 +41,7 @@ import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -58,7 +60,7 @@ public class RuleParserTest { private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", x, y); private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); - private final Fact fact = Expressions.makeFact("http://example.org/s", c); + private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); private final PositiveLiteral fact2 = 
Expressions.makePositiveLiteral("p", abc); private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); @@ -70,28 +72,28 @@ public class RuleParserTest { public void testExplicitIri() throws ParsingException { String input = "() ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test public void testPrefixResolution() throws ParsingException { String input = "@prefix ex: . ex:s(ex:c) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test public void testBaseRelativeResolution() throws ParsingException { String input = "@base . () ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test public void testBaseResolution() throws ParsingException { String input = "@base . s(c) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test @@ -317,7 +319,7 @@ public void testUnicodeLiteral() throws ParsingException { public void testUnicodeUri() throws ParsingException { String input = "@base . @prefix ex: . ex:\\u0073(c) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test @@ -340,7 +342,7 @@ public void testLineComments() throws ParsingException { String input = "@prefix ex: . % comment \n" + "%@prefix ex: \n" + " ex:s(ex:c) . 
% comment \n"; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); - assertEquals(Arrays.asList(fact), statements); + assertEquals(Arrays.asList(fact1), statements); } @Test @@ -439,4 +441,12 @@ public void testCustomDatatype() throws ParsingException { assertEquals(constant, result); } + @Test + public void parse_importStatement_succeeds() throws ParsingException { + String input = "@import \"src/test/resources/facts.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } } diff --git a/vlog4j-parser/src/test/resources/facts.rls b/vlog4j-parser/src/test/resources/facts.rls new file mode 100644 index 000000000..ce985f5a9 --- /dev/null +++ b/vlog4j-parser/src/test/resources/facts.rls @@ -0,0 +1,4 @@ +@prefix ex: . + +ex:s(ex:c) . +p("abc") . From 1e508b5a0a3ed40858d0621dbe59be67a7c970ec Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 10 Feb 2020 17:43:31 +0100 Subject: [PATCH 0784/1255] Parser: Avoid duplicate imports in @import statements --- .../vlog4j/core/reasoner/KnowledgeBase.java | 903 +++++++++--------- .../ImportFileDirectiveHandler.java | 27 +- 2 files changed, 483 insertions(+), 447 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index d63f9be70..3198deafa 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -1,434 +1,469 @@ -package org.semanticweb.vlog4j.core.reasoner; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.List; -import 
java.util.Map; -import java.util.Set; -import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * A knowledge base with rules, facts, and declarations for loading data from - * further sources. This is a "syntactic" object in that it represents some - * information that is not relevant for the semantics of reasoning, but that is - * needed to ensure faithful re-serialisation of knowledge bases loaded from - * files (e.g., preserving order). - * - * @author Markus Kroetzsch - * - */ -public class KnowledgeBase implements Iterable { - - private final Set listeners = new HashSet<>(); - - /** - * Auxiliary class to process {@link Statement}s when added to the knowledge - * base. Returns true if a statement was added successfully. 
- * - * @author Markus Kroetzsch - * - */ - private class AddStatementVisitor implements StatementVisitor { - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.addFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.add(statement); - return true; - } - } - - private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); - - /** - * Auxiliary class to process {@link Statement}s when removed from the knowledge - * base. Returns true if a statement was removed successfully. - * - * @author Irina Dragoste - * - */ - private class RemoveStatementVisitor implements StatementVisitor { - - @Override - public Boolean visit(final Fact statement) { - KnowledgeBase.this.removeFact(statement); - return true; - } - - @Override - public Boolean visit(final Rule statement) { - return true; - } - - @Override - public Boolean visit(final DataSourceDeclaration statement) { - KnowledgeBase.this.dataSourceDeclarations.remove(statement); - return true; - } - } - - private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); - - private class ExtractStatementsVisitor implements StatementVisitor { - - final ArrayList extracted = new ArrayList<>(); - final Class ownType; - - ExtractStatementsVisitor(final Class type) { - this.ownType = type; - } - - ArrayList getExtractedStatements() { - return this.extracted; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Fact statement) { - if (this.ownType.equals(Fact.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - @Override - public Void visit(final Rule statement) { - if (this.ownType.equals(Rule.class)) { - this.extracted.add((T) statement); - } - return null; - } - - @SuppressWarnings("unchecked") - 
@Override - public Void visit(final DataSourceDeclaration statement) { - if (this.ownType.equals(DataSourceDeclaration.class)) { - this.extracted.add((T) statement); - } - return null; - } - } - - /** - * The primary storage for the contents of the knowledge base. - */ - private final LinkedHashSet statements = new LinkedHashSet<>(); - -// TODO support prefixes -// /** -// * Known prefixes that can be used to pretty-print the contents of the knowledge -// * base. We try to preserve user-provided prefixes found in files when loading -// * data. -// */ -// PrefixDeclarations prefixDeclarations; - - /** - * Index structure that organises all facts by their predicate. - */ - private final Map> factsByPredicate = new HashMap<>(); - - /** - * Index structure that holds all data source declarations of this knowledge - * base. - */ - private final Set dataSourceDeclarations = new HashSet<>(); - - /** - * Registers a listener for changes on the knowledge base - * - * @param listener - */ - public void addListener(final KnowledgeBaseListener listener) { - this.listeners.add(listener); - } - - /** - * Unregisters given listener from changes on the knowledge base - * - * @param listener - */ - public void deleteListener(final KnowledgeBaseListener listener) { - this.listeners.remove(listener); - - } - - /** - * Adds a single statement to the knowledge base. - * - * @param statement the statement to be added - */ - public void addStatement(final Statement statement) { - if (this.doAddStatement(statement)) { - this.notifyListenersOnStatementAdded(statement); - } - } - - /** - * Adds a single statement to the knowledge base. - * - * @param statement the statement to be added - * @return true, if the knowledge base has changed. 
- */ - boolean doAddStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { - this.statements.add(statement); - return true; - } - return false; - } - - /** - * Adds a collection of statements to the knowledge base. - * - * @param statements the statements to be added - */ - public void addStatements(final Collection statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Adds a list of statements to the knowledge base. - * - * @param statements the statements to be added - */ - public void addStatements(final Statement... statements) { - final List addedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doAddStatement(statement)) { - addedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsAdded(addedStatements); - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - */ - public void removeStatement(final Statement statement) { - if (this.doRemoveStatement(statement)) { - this.notifyListenersOnStatementRemoved(statement); - } - } - - /** - * Removes a single statement from the knowledge base. - * - * @param statement the statement to remove - * @return true, if the knowledge base has changed. - */ - boolean doRemoveStatement(final Statement statement) { - Validate.notNull(statement, "Statement cannot be Null!"); - - if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { - this.statements.remove(statement); - return true; - } - return false; - } - - /** - * Removes a collection of statements to the knowledge base. 
- * - * @param statements the statements to remove - */ - public void removeStatements(final Collection statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - /** - * Removes a list of statements from the knowledge base. - * - * @param statements the statements to remove - */ - public void removeStatements(final Statement... statements) { - final List removedStatements = new ArrayList<>(); - - for (final Statement statement : statements) { - if (this.doRemoveStatement(statement)) { - removedStatements.add(statement); - } - } - - this.notifyListenersOnStatementsRemoved(removedStatements); - } - - private void notifyListenersOnStatementAdded(final Statement addedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementAdded(addedStatement); - } - } - - private void notifyListenersOnStatementsAdded(final List addedStatements) { - if (!addedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsAdded(addedStatements); - } - } - } - - private void notifyListenersOnStatementRemoved(final Statement removedStatement) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementRemoved(removedStatement); - } - } - - private void notifyListenersOnStatementsRemoved(final List removedStatements) { - if (!removedStatements.isEmpty()) { - for (final KnowledgeBaseListener listener : this.listeners) { - listener.onStatementsRemoved(removedStatements); - } - } - } - - /** - * Get the list of all rules that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete rules. 
- * - * @return list of {@link Rule}s - */ - public List getRules() { - return this.getStatementsByType(Rule.class); - } - - /** - * Get the list of all facts that have been added to the knowledge base. The - * list is read-only and cannot be modified to add or delete facts. - * - * @return list of {@link Fact}s - */ - public List getFacts() { - return this.getStatementsByType(Fact.class); - } - - /** - * Get the list of all data source declarations that have been added to the - * knowledge base. The list is read-only and cannot be modified to add or delete - * facts. - * - * @return list of {@link DataSourceDeclaration}s - */ - public List getDataSourceDeclarations() { - return this.getStatementsByType(DataSourceDeclaration.class); - } - - List getStatementsByType(final Class type) { - final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); - for (final Statement statement : this.statements) { - statement.accept(visitor); - } - return Collections.unmodifiableList(visitor.getExtractedStatements()); - } - - /** - * Add a single fact to the internal data structures. It is assumed that it has - * already been checked that this fact is not present yet. - * - * @param fact the fact to add - */ - void addFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); - this.factsByPredicate.get(predicate).add(fact); - } - - /** - * Removes a single fact from the internal data structure. It is assumed that it - * has already been checked that this fact is already present. - * - * @param fact the fact to remove - */ - void removeFact(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - final Set facts = this.factsByPredicate.get(predicate); - facts.remove(fact); - if (facts.isEmpty()) { - this.factsByPredicate.remove(predicate); - } - } - - /** - * Returns all {@link Statement}s of this knowledge base. 
- * - * The result can be iterated over and will return statements in the original - * order. The collection is read-only and cannot be modified to add or delete - * statements. - * - * @return a collection of statements - */ - public Collection getStatements() { - return Collections.unmodifiableCollection(this.statements); - } - - @Override - public Iterator iterator() { - return Collections.unmodifiableCollection(this.statements).iterator(); - } - - Map> getFactsByPredicate() { - return this.factsByPredicate; - } - -} \ No newline at end of file +package org.semanticweb.vlog4j.core.reasoner; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A knowledge base with rules, facts, and declarations for loading data from + * further sources. This is a "syntactic" object in that it represents some + * information that is not relevant for the semantics of reasoning, but that is + * needed to ensure faithful re-serialisation of knowledge bases loaded from + * files (e.g., preserving order). + * + * @author Markus Kroetzsch + * + */ +public class KnowledgeBase implements Iterable { + + private final Set listeners = new HashSet<>(); + + /** + * all (canonical) file paths imported so far, used to prevent cyclic imports. + */ + private final Set importedFilePaths = new HashSet<>(); + + /** + * Auxiliary class to process {@link Statement}s when added to the knowledge + * base. Returns true if a statement was added successfully. + * + * @author Markus Kroetzsch + * + */ + private class AddStatementVisitor implements StatementVisitor { + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.addFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.add(statement); + return true; + } + } + + private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + + /** + * Auxiliary class to process {@link Statement}s when removed from the knowledge + * base. Returns true if a statement was removed successfully. 
+ * + * @author Irina Dragoste + * + */ + private class RemoveStatementVisitor implements StatementVisitor { + + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.removeFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.remove(statement); + return true; + } + } + + private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); + + private class ExtractStatementsVisitor implements StatementVisitor { + + final ArrayList extracted = new ArrayList<>(); + final Class ownType; + + ExtractStatementsVisitor(final Class type) { + this.ownType = type; + } + + ArrayList getExtractedStatements() { + return this.extracted; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Fact statement) { + if (this.ownType.equals(Fact.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Rule statement) { + if (this.ownType.equals(Rule.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final DataSourceDeclaration statement) { + if (this.ownType.equals(DataSourceDeclaration.class)) { + this.extracted.add((T) statement); + } + return null; + } + } + + /** + * The primary storage for the contents of the knowledge base. + */ + private final LinkedHashSet statements = new LinkedHashSet<>(); + +// TODO support prefixes +// /** +// * Known prefixes that can be used to pretty-print the contents of the knowledge +// * base. We try to preserve user-provided prefixes found in files when loading +// * data. +// */ +// PrefixDeclarations prefixDeclarations; + + /** + * Index structure that organises all facts by their predicate. 
+ */ + private final Map> factsByPredicate = new HashMap<>(); + + /** + * Index structure that holds all data source declarations of this knowledge + * base. + */ + private final Set dataSourceDeclarations = new HashSet<>(); + + /** + * Registers a listener for changes on the knowledge base + * + * @param listener + */ + public void addListener(final KnowledgeBaseListener listener) { + this.listeners.add(listener); + } + + /** + * Unregisters given listener from changes on the knowledge base + * + * @param listener + */ + public void deleteListener(final KnowledgeBaseListener listener) { + this.listeners.remove(listener); + + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + */ + public void addStatement(final Statement statement) { + if (this.doAddStatement(statement)) { + this.notifyListenersOnStatementAdded(statement); + } + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + * @return true, if the knowledge base has changed. + */ + boolean doAddStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); + return true; + } + return false; + } + + /** + * Adds a collection of statements to the knowledge base. + * + * @param statements the statements to be added + */ + public void addStatements(final Collection statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Adds a list of statements to the knowledge base. + * + * @param statements the statements to be added + */ + public void addStatements(final Statement... 
statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + */ + public void removeStatement(final Statement statement) { + if (this.doRemoveStatement(statement)) { + this.notifyListenersOnStatementRemoved(statement); + } + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + * @return true, if the knowledge base has changed. + */ + boolean doRemoveStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + + if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { + this.statements.remove(statement); + return true; + } + return false; + } + + /** + * Removes a collection of statements to the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Collection statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + /** + * Removes a list of statements from the knowledge base. + * + * @param statements the statements to remove + */ + public void removeStatements(final Statement... 
statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + } + + private void notifyListenersOnStatementAdded(final Statement addedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementAdded(addedStatement); + } + } + + private void notifyListenersOnStatementsAdded(final List addedStatements) { + if (!addedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } + } + } + + private void notifyListenersOnStatementRemoved(final Statement removedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementRemoved(removedStatement); + } + } + + private void notifyListenersOnStatementsRemoved(final List removedStatements) { + if (!removedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsRemoved(removedStatements); + } + } + } + + /** + * Get the list of all rules that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete rules. + * + * @return list of {@link Rule}s + */ + public List getRules() { + return this.getStatementsByType(Rule.class); + } + + /** + * Get the list of all facts that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete facts. + * + * @return list of {@link Fact}s + */ + public List getFacts() { + return this.getStatementsByType(Fact.class); + } + + /** + * Get the list of all data source declarations that have been added to the + * knowledge base. The list is read-only and cannot be modified to add or delete + * facts. 
+ * + * @return list of {@link DataSourceDeclaration}s + */ + public List getDataSourceDeclarations() { + return this.getStatementsByType(DataSourceDeclaration.class); + } + + List getStatementsByType(final Class type) { + final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); + for (final Statement statement : this.statements) { + statement.accept(visitor); + } + return Collections.unmodifiableList(visitor.getExtractedStatements()); + } + + /** + * Add a single fact to the internal data structures. It is assumed that it has + * already been checked that this fact is not present yet. + * + * @param fact the fact to add + */ + void addFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); + this.factsByPredicate.get(predicate).add(fact); + } + + /** + * Removes a single fact from the internal data structure. It is assumed that it + * has already been checked that this fact is already present. + * + * @param fact the fact to remove + */ + void removeFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + final Set facts = this.factsByPredicate.get(predicate); + facts.remove(fact); + if (facts.isEmpty()) { + this.factsByPredicate.remove(predicate); + } + } + + /** + * Returns all {@link Statement}s of this knowledge base. + * + * The result can be iterated over and will return statements in the original + * order. The collection is read-only and cannot be modified to add or delete + * statements. + * + * @return a collection of statements + */ + public Collection getStatements() { + return Collections.unmodifiableCollection(this.statements); + } + + @Override + public Iterator iterator() { + return Collections.unmodifiableCollection(this.statements).iterator(); + } + + Map> getFactsByPredicate() { + return this.factsByPredicate; + } + + /** + * Import rules from a file. 
+ * + * @param file the file to import + * @param parseFunction a function that transforms a {@link KnowledgeBase} using the {@link InputStream}. + * + * @throws IOException when reading {@code file} fails + * @throws IllegalArgumentException when {@code file} is null or has already been imported + * @throws RuntimeException when parseFunction throws + * + */ + public KnowledgeBase importRulesFile(File file, + BiFunction parseFunction) + throws RuntimeException, IOException, IllegalArgumentException { + Validate.notNull(file, "file must not be null"); + + boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); + Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); + + try (InputStream stream = new FileInputStream(file)) { + return parseFunction.apply(stream, this); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java index 4334cfb01..3be532ba7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -21,8 +21,8 @@ */ import java.io.File; -import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.util.List; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -39,25 +39,26 @@ * @author Maximilian Marx */ public class ImportFileDirectiveHandler implements DirectiveHandler { + @Override public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); - FileInputStream stream; - - // @todo(mx): should we try to prevent cyclic imports? 
- try { - stream = new FileInputStream(file); - } catch (IOException e) { - throw new ParsingException("Failed to read rules from \"" + file.getName() + "\"", e); - } - KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); - RuleParser.parseInto(knowledgeBase, stream, parserConfiguration); - - return knowledgeBase; + try { + return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + try { + RuleParser.parseInto(kb, stream, parserConfiguration); + } catch (ParsingException e) { + throw new RuntimeException(e); + } + return kb; + }); + } catch (RuntimeException | IOException e) { + throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); + } } } From bdefe1d959a11bf07cee3450139cc449f9287fed Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 10 Feb 2020 19:04:42 +0100 Subject: [PATCH 0785/1255] Parser: support @import-relative statements respecting current @base --- .../parser/DefaultParserConfiguration.java | 8 ++- .../vlog4j/parser/DirectiveHandler.java | 14 ++++ .../semanticweb/vlog4j/parser/RuleParser.java | 36 ++++++++-- .../ImportFileRelativeDirectiveHandler.java | 67 +++++++++++++++++++ .../parser/javacc/JavaCCParserBase.java | 4 +- .../vlog4j/syntax/parser/RuleParserTest.java | 17 +++++ 6 files changed, 139 insertions(+), 7 deletions(-) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java index 383cca87c..625f6f87c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java @@ -25,6 
+25,7 @@ import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.directives.ImportFileDirectiveHandler; +import org.semanticweb.vlog4j.parser.directives.ImportFileRelativeDirectiveHandler; /** * Default parser configuration. Registers default data sources. @@ -35,7 +36,7 @@ public class DefaultParserConfiguration extends ParserConfiguration { public DefaultParserConfiguration() { super(); registerDefaultDataSources(); - registerDirective("import", new ImportFileDirectiveHandler()); + registerDefaultDirectives(); } /** @@ -47,4 +48,9 @@ private void registerDefaultDataSources() { registerDataSource(Serializer.SPARQL_QUERY_RESULT_DATA_SOURCE, new SparqlQueryResultDataSourceDeclarationHandler()); } + + private void registerDefaultDirectives() { + registerDirective("import", new ImportFileDirectiveHandler()); + registerDirective("import-relative", new ImportFileRelativeDirectiveHandler()); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 2c5cd6954..2a8c4a070 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -26,6 +26,7 @@ import java.util.List; import java.util.NoSuchElementException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; @@ -181,4 +182,17 @@ default ParserConfiguration getParserConfiguration(SubParserFactory subParserFac return subParser.getParserConfiguration(); } + + /** + * Obtain {@link PrefixDeclarations} from a {@link SubParserFactory}. 
+ * + * @argument subParserFactory the SubParserFactory. + * + * @return the prefix declarations. + */ + default PrefixDeclarations getPrefixDeclarations(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getPrefixDeclarations(); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 6b7a5eec2..d1395001e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -25,12 +25,12 @@ import java.util.List; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Entity; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -54,18 +54,46 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, - final ParserConfiguration parserConfiguration) throws ParsingException { + final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { final JavaCCParser parser = new JavaCCParser(stream, encoding); + + if (baseIri != null) { + PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); + + try { + prefixDeclarations.setBase(baseIri); + } catch 
(PrefixDeclarationException e) { + throw new ParsingException("Invalid base IRI \"" + baseIri + "\"", e); + } + parser.setPrefixDeclarations(prefixDeclarations); + } + parser.setKnowledgeBase(knowledgeBase); parser.setParserConfiguration(parserConfiguration); doParse(parser); } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, + final ParserConfiguration parserConfiguration) throws ParsingException { + parseInto(knowledgeBase, stream, encoding, parserConfiguration, null); + } + + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, + final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { + parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration, baseIri); + } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final ParserConfiguration parserConfiguration) throws ParsingException { parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration); } + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, + final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); + parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration, baseIri); + } + public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); @@ -217,8 +245,8 @@ public static Term parseTerm(final String input) throws ParsingException { return parseTerm(input, (ParserConfiguration) null); } - public static DataSourceDeclaration parseDataSourceDeclaration(final String input, ParserConfiguration parserConfiguration) - throws ParsingException { + public static 
DataSourceDeclaration parseDataSourceDeclaration(final String input, + ParserConfiguration parserConfiguration) throws ParsingException { return parseSyntaxFragment(input, RuleParser::parseAndExtractDatasourceDeclaration, "data source declaration", parserConfiguration); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java new file mode 100644 index 000000000..9036699b7 --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -0,0 +1,67 @@ +package org.semanticweb.vlog4j.parser.directives; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.util.List; + +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.api.TermType; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.DirectiveArgument; +import org.semanticweb.vlog4j.parser.DirectiveHandler; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; +import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; + +/** + * Handler for parsing {@code @import-relative} statements. + * + * @author Maximilian Marx + */ +public class ImportFileRelativeDirectiveHandler implements DirectiveHandler { + @Override + public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + throws ParsingException { + DirectiveHandler.validateNumberOfArguments(arguments, 1); + PrefixDeclarations prefixDeclarations = getPrefixDeclarations(subParserFactory); + File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file"); + KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + + try { + return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + try { + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); + } catch (ParsingException e) { + throw new RuntimeException(e); + } + return kb; + }); + } catch (RuntimeException | IOException e) { + throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); + } + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 7ebeb6e9d..2d6e9acaf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -310,11 +310,11 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { + public void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; } - PrefixDeclarations getPrefixDeclarations() { + public PrefixDeclarations getPrefixDeclarations() { return prefixDeclarations; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 62e87cdc5..63ab37258 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -62,6 +62,7 @@ public class RuleParserTest { private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", x, d); private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", c); private final PositiveLiteral fact2 = Expressions.makePositiveLiteral("p", abc); + private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", abc); private final Conjunction body1 = Expressions.makeConjunction(atom1, atom2); private final Conjunction body2 = Expressions.makeConjunction(negAtom1, atom2); private final Conjunction head = Expressions.makePositiveConjunction(atom3, atom4); @@ -449,4 +450,20 @@ public void parse_importStatement_succeeds() throws ParsingException { List result = knowledgeBase.getFacts(); assertEquals(expected, result); } + + @Test + public 
void parse_relativeImportStatement_suceeds() throws ParsingException { + String input = "@base . @import-relative \"src/test/resources/facts.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact3); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test(expected = ParsingException.class) + public void parseInto_duplicateImportStatements_throws() throws ParsingException { + String input = "@import \"src/test/resources/facts.rls\" . "; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + RuleParser.parseInto(knowledgeBase, input); + } } From afa8e7e0861dda002e2e6701da6340ea3ecf3317 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 17:31:40 +0100 Subject: [PATCH 0786/1255] Core: Fix style --- .../implementation/InMemoryDataSource.java | 22 +- .../implementation/VLogKnowledgeBase.java | 604 +++---- .../reasoner/implementation/VLogReasoner.java | 1411 +++++++++-------- 3 files changed, 1017 insertions(+), 1020 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java index 947b78078..c628cd023 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -31,7 +31,7 @@ * the object will directly accept tuples of constant names that are internally * stored in a form that can be passed to the reasoner directly, thereby saving * memory and loading time. - * + * * @author Markus Kroetzsch * */ @@ -47,11 +47,9 @@ public class InMemoryDataSource implements DataSource { * given capacity is the initial size of the space allocated. For best * efficiency, the actual number of facts should exactly correspond to this * capacity. - * - * @param arity - * the number of parameters in a fact from this source - * @param initialCapacity - * the planned number of facts + * + * @param arity the number of parameters in a fact from this source + * @param initialCapacity the planned number of facts */ public InMemoryDataSource(final int arity, final int initialCapacity) { this.capacity = initialCapacity; @@ -62,9 +60,8 @@ public InMemoryDataSource(final int arity, final int initialCapacity) { /** * Adds a fact to this data source. The number of constant names must agree with * the arity of this data source. - * - * @param constantNames - * the string names of the constants in this fact + * + * @param constantNames the string names of the constants in this fact */ public void addTuple(final String... constantNames) { if (constantNames.length != this.arity) { @@ -85,7 +82,7 @@ public void addTuple(final String... constantNames) { /** * Returns the data stored in this data source, in the format expected by the * VLog reasoner backend. 
- * + * * @return the data */ public String[][] getData() { @@ -108,5 +105,4 @@ public String getSyntacticRepresentation() { } return sb.toString(); } - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index 232ecafe9..cc192cce2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -1,302 +1,302 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 - 2020 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; - -/** - * Class for organizing a Knowledge Base using vLog-specific data structures. - * - * @author Irina Dragoste - * - */ -public class VLogKnowledgeBase { - - private final Map edbPredicates = new HashMap<>(); - private final Map aliasesForEdbPredicates = new HashMap<>(); - - private final Set aliasedEdbPredicates = new HashSet<>(); - - private final Set idbPredicates = new HashSet<>(); - - private final Map> directEdbFacts = new HashMap<>(); - - private final Set rules = new HashSet<>(); - - /** - * Package-protected constructor, that organizes given {@code knowledgeBase} in - * vLog-specific data structures. 
- * - * @param knowledgeBase - */ - VLogKnowledgeBase(final KnowledgeBase knowledgeBase) { - final LoadKbVisitor visitor = this.new LoadKbVisitor(); - visitor.clearIndexes(); - for (final Statement statement : knowledgeBase) { - statement.accept(visitor); - } - } - - boolean hasData() { - return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); - } - - public boolean hasRules() { - return !this.rules.isEmpty(); - } - - Predicate getAlias(final Predicate predicate) { - if (this.edbPredicates.containsKey(predicate)) { - return predicate; - } else { - return this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); - } - } - - String getVLogDataSourcesConfigurationString() { - final StringBuilder configStringBuilder = new StringBuilder(); - final Formatter formatter = new Formatter(configStringBuilder); - int dataSourceIndex = 0; - - for (final Entry e : this.edbPredicates.entrySet()) { - dataSourceIndex = addDataSourceConfigurationString(e.getValue().getDataSource(), e.getKey(), - dataSourceIndex, formatter); - } - - for (final Entry e : this.aliasesForEdbPredicates.entrySet()) { - dataSourceIndex = addDataSourceConfigurationString(e.getKey().getDataSource(), e.getValue(), - dataSourceIndex, formatter); - } - - formatter.close(); - return configStringBuilder.toString(); - } - - int addDataSourceConfigurationString(final DataSource dataSource, final Predicate predicate, - final int dataSourceIndex, final Formatter formatter) { - int newDataSourceIndex = dataSourceIndex; - - if (dataSource != null) { - if (dataSource instanceof VLogDataSource) { - final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; - final String configString = vLogDataSource.toConfigString(); - if (configString != null) { - formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); - newDataSourceIndex++; - } - } - } - - return newDataSourceIndex; - } - - Map getEdbPredicates() { - return 
this.edbPredicates; - } - - Map getAliasesForEdbPredicates() { - return this.aliasesForEdbPredicates; - } - - Map> getDirectEdbFacts() { - return this.directEdbFacts; - } - - Set getRules() { - return this.rules; - } - - /** - * - * Local visitor implementation for processing statements upon loading. Internal - * index structures are updated based on the statements that are detected. - * - * @author Markus Kroetzsch - */ - - class LoadKbVisitor implements StatementVisitor { - - public void clearIndexes() { - VLogKnowledgeBase.this.edbPredicates.clear(); - VLogKnowledgeBase.this.idbPredicates.clear(); - VLogKnowledgeBase.this.aliasedEdbPredicates.clear(); - VLogKnowledgeBase.this.aliasesForEdbPredicates.clear(); - VLogKnowledgeBase.this.directEdbFacts.clear(); - VLogKnowledgeBase.this.rules.clear(); - } - - @Override - public Void visit(final Fact fact) { - final Predicate predicate = fact.getPredicate(); - registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); - if (!VLogKnowledgeBase.this.directEdbFacts.containsKey(predicate)) { - final List facts = new ArrayList<>(); - facts.add(fact); - VLogKnowledgeBase.this.directEdbFacts.put(predicate, facts); - } else { - VLogKnowledgeBase.this.directEdbFacts.get(predicate).add(fact); - } - return null; - } - - @Override - public Void visit(final Rule statement) { - VLogKnowledgeBase.this.rules.add(statement); - for (final PositiveLiteral positiveLiteral : statement.getHead()) { - final Predicate predicate = positiveLiteral.getPredicate(); - if (!VLogKnowledgeBase.this.idbPredicates.contains(predicate)) { - if (VLogKnowledgeBase.this.edbPredicates.containsKey(predicate)) { - addEdbAlias(VLogKnowledgeBase.this.edbPredicates.get(predicate)); - VLogKnowledgeBase.this.edbPredicates.remove(predicate); - } - VLogKnowledgeBase.this.idbPredicates.add(predicate); - } - } - return null; - } - - @Override - public Void visit(final DataSourceDeclaration statement) { - registerEdbDeclaration(statement); - return null; - 
} - - void registerEdbDeclaration(final DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - if (VLogKnowledgeBase.this.idbPredicates.contains(predicate) - || VLogKnowledgeBase.this.aliasedEdbPredicates.contains(predicate)) { - if (!VLogKnowledgeBase.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { - addEdbAlias(dataSourceDeclaration); - } - } else { - final DataSourceDeclaration currentMainDeclaration = VLogKnowledgeBase.this.edbPredicates.get(predicate); - if (currentMainDeclaration == null) { - VLogKnowledgeBase.this.edbPredicates.put(predicate, dataSourceDeclaration); - } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { - addEdbAlias(currentMainDeclaration); - addEdbAlias(dataSourceDeclaration); - VLogKnowledgeBase.this.edbPredicates.remove(predicate); - } // else: predicate already known to have local facts (only) - } - } - - void addEdbAlias(final DataSourceDeclaration dataSourceDeclaration) { - final Predicate predicate = dataSourceDeclaration.getPredicate(); - Predicate aliasPredicate; - if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { - aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); - } else { - aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), - predicate.getArity()); - } - VLogKnowledgeBase.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); - VLogKnowledgeBase.this.aliasedEdbPredicates.add(predicate); - - final List terms = new ArrayList<>(); - for (int i = 1; i <= predicate.getArity(); i++) { - terms.add(new UniversalVariableImpl("X" + i)); - } - final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); - final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); - final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), - new ConjunctionImpl<>(Arrays.asList(body))); - 
VLogKnowledgeBase.this.rules.add(rule); - } - - } - - /** - * Dummy data source declaration for predicates for which we have explicit local - * facts in the input. - * - * @author Markus Kroetzsch - * - */ - class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { - - final Predicate predicate; - - public LocalFactsDataSourceDeclaration(Predicate predicate) { - this.predicate = predicate; - } - - @Override - public T accept(StatementVisitor statementVisitor) { - return statementVisitor.visit(this); - } - - @Override - public Predicate getPredicate() { - return this.predicate; - } - - @Override - public DataSource getDataSource() { - return null; - } - - @Override - public int hashCode() { - return this.predicate.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; - return this.predicate.equals(other.predicate); - } - } - -} +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 - 2020 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; +import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; + +/** + * Class for organizing a Knowledge Base using vLog-specific data structures. + * + * @author Irina Dragoste + * + */ +public class VLogKnowledgeBase { + + private final Map edbPredicates = new HashMap<>(); + private final Map aliasesForEdbPredicates = new HashMap<>(); + + private final Set aliasedEdbPredicates = new HashSet<>(); + + private final Set idbPredicates = new HashSet<>(); + + private final Map> directEdbFacts = new HashMap<>(); + + private final Set rules = new HashSet<>(); + + /** + * Package-protected constructor, that organizes given {@code knowledgeBase} in + * vLog-specific data structures. 
+ * + * @param knowledgeBase + */ + VLogKnowledgeBase(final KnowledgeBase knowledgeBase) { + final LoadKbVisitor visitor = this.new LoadKbVisitor(); + visitor.clearIndexes(); + for (final Statement statement : knowledgeBase) { + statement.accept(visitor); + } + } + + boolean hasData() { + return !this.edbPredicates.isEmpty() && !this.aliasedEdbPredicates.isEmpty(); + } + + public boolean hasRules() { + return !this.rules.isEmpty(); + } + + Predicate getAlias(final Predicate predicate) { + if (this.edbPredicates.containsKey(predicate)) { + return predicate; + } else { + return this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + } + + String getVLogDataSourcesConfigurationString() { + final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); + int dataSourceIndex = 0; + + for (final Entry e : this.edbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getValue().getDataSource(), e.getKey(), + dataSourceIndex, formatter); + } + + for (final Entry e : this.aliasesForEdbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getKey().getDataSource(), e.getValue(), + dataSourceIndex, formatter); + } + + formatter.close(); + return configStringBuilder.toString(); + } + + int addDataSourceConfigurationString(final DataSource dataSource, final Predicate predicate, + final int dataSourceIndex, final Formatter formatter) { + int newDataSourceIndex = dataSourceIndex; + + if (dataSource != null) { + if (dataSource instanceof VLogDataSource) { + final VLogDataSource vLogDataSource = (VLogDataSource) dataSource; + final String configString = vLogDataSource.toConfigString(); + if (configString != null) { + formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); + newDataSourceIndex++; + } + } + } + + return newDataSourceIndex; + } + + Map getEdbPredicates() { + return 
this.edbPredicates; + } + + Map getAliasesForEdbPredicates() { + return this.aliasesForEdbPredicates; + } + + Map> getDirectEdbFacts() { + return this.directEdbFacts; + } + + Set getRules() { + return this.rules; + } + + /** + * + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. + * + * @author Markus Kroetzsch + */ + + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + VLogKnowledgeBase.this.edbPredicates.clear(); + VLogKnowledgeBase.this.idbPredicates.clear(); + VLogKnowledgeBase.this.aliasedEdbPredicates.clear(); + VLogKnowledgeBase.this.aliasesForEdbPredicates.clear(); + VLogKnowledgeBase.this.directEdbFacts.clear(); + VLogKnowledgeBase.this.rules.clear(); + } + + @Override + public Void visit(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!VLogKnowledgeBase.this.directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList<>(); + facts.add(fact); + VLogKnowledgeBase.this.directEdbFacts.put(predicate, facts); + } else { + VLogKnowledgeBase.this.directEdbFacts.get(predicate).add(fact); + } + return null; + } + + @Override + public Void visit(final Rule statement) { + VLogKnowledgeBase.this.rules.add(statement); + for (final PositiveLiteral positiveLiteral : statement.getHead()) { + final Predicate predicate = positiveLiteral.getPredicate(); + if (!VLogKnowledgeBase.this.idbPredicates.contains(predicate)) { + if (VLogKnowledgeBase.this.edbPredicates.containsKey(predicate)) { + addEdbAlias(VLogKnowledgeBase.this.edbPredicates.get(predicate)); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } + VLogKnowledgeBase.this.idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(final DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + 
} + + void registerEdbDeclaration(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + if (VLogKnowledgeBase.this.idbPredicates.contains(predicate) + || VLogKnowledgeBase.this.aliasedEdbPredicates.contains(predicate)) { + if (!VLogKnowledgeBase.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + final DataSourceDeclaration currentMainDeclaration = VLogKnowledgeBase.this.edbPredicates + .get(predicate); + if (currentMainDeclaration == null) { + VLogKnowledgeBase.this.edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } // else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), + predicate.getArity()); + } + VLogKnowledgeBase.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + VLogKnowledgeBase.this.aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new UniversalVariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), + new ConjunctionImpl<>(Arrays.asList(body))); + 
VLogKnowledgeBase.this.rules.add(rule); + } + + } + + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return this.predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return this.predicate.equals(other.predicate); + } + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index e8db05863..b48b19f50 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,705 +1,706 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import 
org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.MaterializationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.CyclicCheckResult; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 
VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Reasoner implementation using the VLog backend. - * - * - * - * @author Markus Kroetzsch - * - */ -public class VLogReasoner implements Reasoner { - private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - - final KnowledgeBase knowledgeBase; - final VLog vLog = new VLog(); - - private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; - private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; - - private LogLevel internalLogLevel = LogLevel.WARNING; - private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; - private Integer timeoutAfterSeconds; - private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; - - /** - * Holds the state of the reasoning result. Has value {@code true} if reasoning - * has completed, {@code false} if it has been interrupted. 
- */ - private boolean reasoningCompleted; - - public VLogReasoner(KnowledgeBase knowledgeBase) { - super(); - this.knowledgeBase = knowledgeBase; - this.knowledgeBase.addListener(this); - - setLogLevel(this.internalLogLevel); - } - - @Override - public KnowledgeBase getKnowledgeBase() { - return this.knowledgeBase; - } - - @Override - public void setAlgorithm(final Algorithm algorithm) { - Validate.notNull(algorithm, "Algorithm cannot be null!"); - validateNotClosed(); - this.algorithm = algorithm; - } - - @Override - public Algorithm getAlgorithm() { - return this.algorithm; - } - - @Override - public void setReasoningTimeout(Integer seconds) { - validateNotClosed(); - if (seconds != null) { - Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); - } - this.timeoutAfterSeconds = seconds; - } - - @Override - public Integer getReasoningTimeout() { - return this.timeoutAfterSeconds; - } - - @Override - public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { - validateNotClosed(); - Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); - this.ruleRewriteStrategy = ruleRewritingStrategy; - } - - @Override - public RuleRewriteStrategy getRuleRewriteStrategy() { - return this.ruleRewriteStrategy; - } - - /* - * TODO Due to automatic predicate renaming, it can happen that an EDB predicate - * cannot be queried after loading unless reasoning has already been invoked - * (since the auxiliary rule that imports the EDB facts to the "real" predicate - * must be used). 
This issue could be weakened by rewriting queries to - * (single-source) EDB predicates internally when in such a state, - */ - // @Override - void load() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - loadKnowledgeBase(); - break; - case KB_LOADED: - case MATERIALISED: - // do nothing, all KB is already loaded - break; - case KB_CHANGED: - resetReasoner(); - loadKnowledgeBase(); - default: - break; - } - } - - void loadKnowledgeBase() throws IOException { - LOGGER.info("Started loading knowledge base ..."); - - final VLogKnowledgeBase vLogKB = new VLogKnowledgeBase(this.knowledgeBase); - - if (!vLogKB.hasData()) { - LOGGER.warn("No data statements (facts or datasource declarations) have been provided."); - } - - // 1. vLog is initialized by loading VLog data sources - loadVLogDataSources(vLogKB); - - // 2. in-memory data is loaded - loadInMemoryDataSources(vLogKB); - validateDataSourcePredicateArities(vLogKB); - - loadFacts(vLogKB); - - // 3. rules are loaded - loadRules(vLogKB); - - this.reasonerState = ReasonerState.KB_LOADED; - - // if there are no rules, then materialisation state is complete - this.correctness = !vLogKB.hasRules() ? 
Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; - - LOGGER.info("Finished loading knowledge base."); - } - - void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { - try { - this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } - } - - void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { - vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); - - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); - } - - void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { - if (dataSource instanceof InMemoryDataSource) { - - final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; - try { - load(predicate, inMemoryDataSource); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - } - } - - void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); - - this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); - - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : inMemoryDataSource.getData()) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } - - /** - * Checks if the loaded external data sources do in fact contain data of the - * correct arity. 
- * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { - - vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); - - vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); - } - - /** - * Checks if the loaded external data for a given source does in fact contain - * data of the correct arity for the given predidate. - * - * @param predicate the predicate for which data is loaded - * @param dataSource the data source used - * - * @throws IncompatiblePredicateArityException to indicate a problem - * (non-checked exception) - */ - void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) - throws IncompatiblePredicateArityException { - if (dataSource == null) { - return; - } - try { - final int dataSourcePredicateArity = this.vLog - .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); - if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {} for predicate {} is empty! 
", dataSource, predicate); - } else if (predicate.getArity() != dataSourcePredicateArity) { - throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - void loadFacts(final VLogKnowledgeBase vLogKB) { - final Map> directEdbFacts = vLogKB.getDirectEdbFacts(); - - directEdbFacts.forEach((k, v) -> { - try { - final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(vLogKB.getAlias(k)); - final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(v); - - this.vLog.addData(vLogPredicateName, vLogPredicateTuples); - - if (LOGGER.isDebugEnabled()) { - for (final String[] tuple : vLogPredicateTuples) { - LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); - } - } - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); - } - - }); - } - - void loadRules(final VLogKnowledgeBase vLogKB) { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(vLogKB.getRules()); - final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter - .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); - try { - this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); - if (LOGGER.isDebugEnabled()) { - for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { - LOGGER.debug("Loaded rule {}.", rule.toString()); - } - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } - } - - @Override - public boolean reason() throws IOException { - validateNotClosed(); - - switch (this.reasonerState) { - case KB_NOT_LOADED: - load(); - runChase(); - break; - case KB_LOADED: - runChase(); - break; - case KB_CHANGED: - resetReasoner(); - load(); - runChase(); - break; - case MATERIALISED: - runChase(); 
- break; - default: - break; - } - - return this.reasoningCompleted; - } - - private void runChase() { - LOGGER.info("Started materialisation of inferences ..."); - this.reasonerState = ReasonerState.MATERIALISED; - - final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; - try { - if (this.timeoutAfterSeconds == null) { - this.vLog.materialize(skolemChase); - this.reasoningCompleted = true; - } else { - this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final MaterializationException e) { - // FIXME: the message generated here is not guaranteed to be the correct - // interpretation of the exception that is caught - throw new RuntimeException( - "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", - e); - } - - if (this.reasoningCompleted) { - this.correctness = Correctness.SOUND_AND_COMPLETE; - LOGGER.info("Completed materialisation of inferences."); - } else { - this.correctness = Correctness.SOUND_BUT_INCOMPLETE; - LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); - } - } - - @Override - public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); - - final boolean filterBlanks = !includeNulls; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - - TermQueryResultIterator stringQueryResultIterator; - try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. 
Answer must be empty!"); - return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); - } - - logWarningOnCorrectness(); - return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); - } - - @Override - public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { - validateBeforeQuerying(query); - - final boolean filterBlanks = !includeNulls; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - - long result; - try { - result = this.vLog.querySize(vLogAtom, true, filterBlanks); - } catch (NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (NonExistingPredicateException e) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. Answer must be empty!"); - result = 0; - } - logWarningOnCorrectness(); - return new QueryAnswerCountImpl(this.correctness, result); - } - - @Override - public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, - final boolean includeBlanks) throws IOException { - validateBeforeQuerying(query); - Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); - Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); - - final boolean filterBlanks = !includeBlanks; - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); - try { - this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); - } catch (final NonExistingPredicateException e1) { - LOGGER.warn("Query uses predicate " + query.getPredicate() - + " that does not occur in the knowledge base. 
Answers are therefore empty."); - } - - logWarningOnCorrectness(); - return this.correctness; - } - - private void validateBeforeQuerying(final PositiveLiteral query) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(query, "Query atom must not be null!"); - } - - @Override - public Correctness writeInferences(OutputStream stream) throws IOException { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Obtaining inferences is not alowed before reasoner is loaded!"); - } - final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); - - for (final Predicate predicate : toBeQueriedHeadPredicates) { - final PositiveLiteral queryAtom = getQueryAtom(predicate); - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); - try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { - while (answers.hasNext()) { - final karmaresearch.vlog.Term[] vlogTerms = answers.next(); - final List termList = VLogToModelConverter.toTermList(vlogTerms); - stream.write(Serializer.getFactString(predicate, termList).getBytes()); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final NonExistingPredicateException e1) { - throw new RuntimeException("Inconsistent knowledge base state.", e1); - } - } - - logWarningOnCorrectness(); - return this.correctness; - } - - @Override - public Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - return writeInferences(stream); - } - } - - private void logWarningOnCorrectness() { - if (this.correctness != Correctness.SOUND_AND_COMPLETE) { - LOGGER.warn("Query answers may be 
{} with respect to the current Knowledge Base!", this.correctness); - } - } - - @Override - public void resetReasoner() { - validateNotClosed(); - this.reasonerState = ReasonerState.KB_NOT_LOADED; - this.vLog.stop(); - LOGGER.info("Reasoner has been reset. All inferences computed during reasoning have been discarded."); - } - - @Override - public void close() { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.info("Reasoner is already closed."); - } else { - this.reasonerState = ReasonerState.CLOSED; - this.knowledgeBase.deleteListener(this); - this.vLog.stop(); - LOGGER.info("Reasoner closed."); - } - } - - @Override - public void setLogLevel(LogLevel logLevel) { - validateNotClosed(); - Validate.notNull(logLevel, "Log level cannot be null!"); - this.internalLogLevel = logLevel; - this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); - } - - @Override - public LogLevel getLogLevel() { - return this.internalLogLevel; - } - - @Override - public void setLogFile(String filePath) { - validateNotClosed(); - this.vLog.setLogFile(filePath); - } - - @Override - public boolean isJA() { - return checkAcyclicity(AcyclicityNotion.JA); - } - - @Override - public boolean isRJA() { - return checkAcyclicity(AcyclicityNotion.RJA); - } - - @Override - public boolean isMFA() { - return checkAcyclicity(AcyclicityNotion.MFA); - } - - @Override - public boolean isRMFA() { - return checkAcyclicity(AcyclicityNotion.RMFA); - } - - @Override - public boolean isMFC() { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - throw new ReasonerStateException(this.reasonerState, - "Checking rules acyclicity is not allowed before loading!"); - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic("MFC"); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.CYCLIC); - } - - @Override - 
public CyclicityResult checkForCycles() { - final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); - if (acyclic) { - return CyclicityResult.ACYCLIC; - } else { - final boolean cyclic = isMFC(); - if (cyclic) { - return CyclicityResult.CYCLIC; - } - return CyclicityResult.UNDETERMINED; - } - } - - @Override - public void onStatementsAdded(List statementsAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementsAdded(statementsAdded); - updateCorrectnessOnStatementsAdded(); - } - - @Override - public void onStatementAdded(Statement statementAdded) { - // TODO more elaborate materialisation state handling - - updateReasonerToKnowledgeBaseChanged(); - - // updateCorrectnessOnStatementAdded(statementAdded); - updateCorrectnessOnStatementsAdded(); - } - - @Override - public void onStatementRemoved(Statement statementRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); - } - - @Override - public void onStatementsRemoved(List statementsRemoved) { - updateReasonerToKnowledgeBaseChanged(); - updateCorrectnessOnStatementsRemoved(); - } - - Set getKnolwedgeBasePredicates() { - final Set toBeQueriedHeadPredicates = new HashSet<>(); - for (final Rule rule : this.knowledgeBase.getRules()) { - for (final Literal literal : rule.getHead()) { - toBeQueriedHeadPredicates.add(literal.getPredicate()); - } - } - for (final DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { - toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); - } - for (final Fact fact : this.knowledgeBase.getFacts()) { - toBeQueriedHeadPredicates.add(fact.getPredicate()); - } - return toBeQueriedHeadPredicates; - } - - private PositiveLiteral getQueryAtom(final Predicate predicate) { - final List toBeGroundedVariables = new ArrayList<>(predicate.getArity()); - for (int i = 0; i < predicate.getArity(); i++) { - 
toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); - } - return Expressions.makePositiveLiteral(predicate, toBeGroundedVariables); - } - - private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { - validateNotClosed(); - if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { - try { - load(); - } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 - throw new RuntimeException(e); - } - } - - CyclicCheckResult checkCyclic; - try { - checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); - } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible - } - return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); - } - - private void updateReasonerToKnowledgeBaseChanged() { - if (this.reasonerState.equals(ReasonerState.KB_LOADED) - || this.reasonerState.equals(ReasonerState.MATERIALISED)) { - - this.reasonerState = ReasonerState.KB_CHANGED; - } - } - - private void updateCorrectnessOnStatementsAdded() { - if (this.reasonerState == ReasonerState.KB_CHANGED) { - // TODO refine - this.correctness = Correctness.INCORRECT; - } - } - - private void updateCorrectnessOnStatementsRemoved() { - if (this.reasonerState == ReasonerState.KB_CHANGED) { - // TODO refine - this.correctness = Correctness.INCORRECT; - } - } - - /** - * Check if reasoner is closed and throw an exception if it is. 
- * - * @throws ReasonerStateException - */ - void validateNotClosed() throws ReasonerStateException { - if (this.reasonerState == ReasonerState.CLOSED) { - LOGGER.error("Invalid operation requested on a closed reasoner object!"); - throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); - } - } - - ReasonerState getReasonerState() { - return this.reasonerState; - } - - void setReasonerState(ReasonerState reasonerState) { - this.reasonerState = reasonerState; - } -} +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; +import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; +import org.semanticweb.vlog4j.core.reasoner.Algorithm; +import org.semanticweb.vlog4j.core.reasoner.Correctness; +import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; +import 
org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.LogLevel; +import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; +import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import karmaresearch.vlog.AlreadyStartedException; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.CyclicCheckResult; + +/* + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Reasoner implementation using the VLog backend. 
+ * + * + * + * @author Markus Kroetzsch + * + */ +public class VLogReasoner implements Reasoner { + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); + + final KnowledgeBase knowledgeBase; + final VLog vLog = new VLog(); + + private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; + private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; + + private LogLevel internalLogLevel = LogLevel.WARNING; + private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; + private Integer timeoutAfterSeconds; + private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; + + /** + * Holds the state of the reasoning result. Has value {@code true} if reasoning + * has completed, {@code false} if it has been interrupted. + */ + private boolean reasoningCompleted; + + public VLogReasoner(KnowledgeBase knowledgeBase) { + super(); + this.knowledgeBase = knowledgeBase; + this.knowledgeBase.addListener(this); + + setLogLevel(this.internalLogLevel); + } + + @Override + public KnowledgeBase getKnowledgeBase() { + return this.knowledgeBase; + } + + @Override + public void setAlgorithm(final Algorithm algorithm) { + Validate.notNull(algorithm, "Algorithm cannot be null!"); + validateNotClosed(); + this.algorithm = algorithm; + } + + @Override + public Algorithm getAlgorithm() { + return this.algorithm; + } + + @Override + public void setReasoningTimeout(Integer seconds) { + validateNotClosed(); + if (seconds != null) { + Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); + } + this.timeoutAfterSeconds = seconds; + } + + @Override + public Integer getReasoningTimeout() { + return this.timeoutAfterSeconds; + } + + @Override + public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) { + validateNotClosed(); + Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); + this.ruleRewriteStrategy = ruleRewritingStrategy; + } + + @Override + public 
RuleRewriteStrategy getRuleRewriteStrategy() { + return this.ruleRewriteStrategy; + } + + /* + * TODO Due to automatic predicate renaming, it can happen that an EDB predicate + * cannot be queried after loading unless reasoning has already been invoked + * (since the auxiliary rule that imports the EDB facts to the "real" predicate + * must be used). This issue could be weakened by rewriting queries to + * (single-source) EDB predicates internally when in such a state, + */ + // @Override + void load() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + resetReasoner(); + loadKnowledgeBase(); + default: + break; + } + } + + void loadKnowledgeBase() throws IOException { + LOGGER.info("Started loading knowledge base ..."); + + final VLogKnowledgeBase vLogKB = new VLogKnowledgeBase(this.knowledgeBase); + + if (!vLogKB.hasData()) { + LOGGER.warn("No data statements (facts or datasource declarations) have been provided."); + } + + // 1. vLog is initialized by loading VLog data sources + loadVLogDataSources(vLogKB); + + // 2. in-memory data is loaded + loadInMemoryDataSources(vLogKB); + validateDataSourcePredicateArities(vLogKB); + + loadFacts(vLogKB); + + // 3. rules are loaded + loadRules(vLogKB); + + this.reasonerState = ReasonerState.KB_LOADED; + + // if there are no rules, then materialisation state is complete + this.correctness = !vLogKB.hasRules() ? 
Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + + LOGGER.info("Finished loading knowledge base."); + } + + void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { + try { + this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration.", e); + } + } + + void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { + vLogKB.getEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(v.getDataSource(), k)); + + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> loadInMemoryDataSource(k.getDataSource(), v)); + } + + void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { + if (dataSource instanceof InMemoryDataSource) { + + final InMemoryDataSource inMemoryDataSource = (InMemoryDataSource) dataSource; + try { + load(predicate, inMemoryDataSource); + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + } + } + + void load(final Predicate predicate, final InMemoryDataSource inMemoryDataSource) throws EDBConfigurationException { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } + + /** + * Checks if the loaded external data sources do in fact contain data of the + * correct arity. 
+ * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { + + vLogKB.getEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(k, v.getDataSource())); + + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> validateDataSourcePredicateArity(v, k.getDataSource())); + } + + /** + * Checks if the loaded external data for a given source does in fact contain + * data of the correct arity for the given predidate. + * + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource) + throws IncompatiblePredicateArityException { + if (dataSource == null) { + return; + } + try { + final int dataSourcePredicateArity = this.vLog + .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); + if (dataSourcePredicateArity == -1) { + LOGGER.warn("Data source {} for predicate {} is empty! 
", dataSource, predicate); + } else if (predicate.getArity() != dataSourcePredicateArity) { + throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + void loadFacts(final VLogKnowledgeBase vLogKB) { + final Map> directEdbFacts = vLogKB.getDirectEdbFacts(); + + directEdbFacts.forEach((k, v) -> { + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(vLogKB.getAlias(k)); + final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(v); + + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : vLogPredicateTuples) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RuntimeException("Invalid data sources configuration!", e); + } + + }); + } + + void loadRules(final VLogKnowledgeBase vLogKB) { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(vLogKB.getRules()); + final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter + .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); + try { + this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); + if (LOGGER.isDebugEnabled()) { + for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { + LOGGER.debug("Loaded rule {}.", rule.toString()); + } + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } + } + + @Override + public boolean reason() throws IOException { + validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + load(); + runChase(); + break; + case KB_LOADED: + runChase(); + break; + case KB_CHANGED: + resetReasoner(); + load(); + runChase(); + break; + case MATERIALISED: + runChase(); 
+ break; + default: + break; + } + + return this.reasoningCompleted; + } + + private void runChase() { + LOGGER.info("Started materialisation of inferences ..."); + this.reasonerState = ReasonerState.MATERIALISED; + + final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; + try { + if (this.timeoutAfterSeconds == null) { + this.vLog.materialize(skolemChase); + this.reasoningCompleted = true; + } else { + this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final MaterializationException e) { + // FIXME: the message generated here is not guaranteed to be the correct + // interpretation of the exception that is caught + throw new RuntimeException( + "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", + e); + } + + if (this.reasoningCompleted) { + this.correctness = Correctness.SOUND_AND_COMPLETE; + LOGGER.info("Completed materialisation of inferences."); + } else { + this.correctness = Correctness.SOUND_BUT_INCOMPLETE; + LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); + } + } + + @Override + public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls) { + validateBeforeQuerying(query); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + TermQueryResultIterator stringQueryResultIterator; + try { + stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. 
Answer must be empty!"); + return new EmptyQueryResultIterator(Correctness.SOUND_AND_COMPLETE); + } + + logWarningOnCorrectness(); + return new VLogQueryResultIterator(stringQueryResultIterator, this.correctness); + } + + @Override + public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls) { + validateBeforeQuerying(query); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + long result; + try { + result = this.vLog.querySize(vLogAtom, true, filterBlanks); + } catch (NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (NonExistingPredicateException e) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. Answer must be empty!"); + result = 0; + } + logWarningOnCorrectness(); + return new QueryAnswerCountImpl(this.correctness, result); + } + + @Override + public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + final boolean includeBlanks) throws IOException { + validateBeforeQuerying(query); + + Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); + Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); + + final boolean filterBlanks = !includeBlanks; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + try { + this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state!", e); + } catch (final NonExistingPredicateException e1) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the knowledge base. 
Answers are therefore empty."); + } + + logWarningOnCorrectness(); + return this.correctness; + } + + private void validateBeforeQuerying(final PositiveLiteral query) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); + } + Validate.notNull(query, "Query atom must not be null!"); + } + + @Override + public Correctness writeInferences(OutputStream stream) throws IOException { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Obtaining inferences is not alowed before reasoner is loaded!"); + } + final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); + + for (final Predicate predicate : toBeQueriedHeadPredicates) { + final PositiveLiteral queryAtom = getQueryAtom(predicate); + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); + try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { + while (answers.hasNext()) { + final karmaresearch.vlog.Term[] vlogTerms = answers.next(); + final List termList = VLogToModelConverter.toTermList(vlogTerms); + stream.write(Serializer.getFactString(predicate, termList).getBytes()); + } + } catch (final NotStartedException e) { + throw new RuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + throw new RuntimeException("Inconsistent knowledge base state.", e1); + } + } + + logWarningOnCorrectness(); + return this.correctness; + } + + @Override + public Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + return writeInferences(stream); + } + } + + private void logWarningOnCorrectness() { + if (this.correctness != Correctness.SOUND_AND_COMPLETE) { + LOGGER.warn("Query answers may be 
{} with respect to the current Knowledge Base!", this.correctness); + } + } + + @Override + public void resetReasoner() { + validateNotClosed(); + this.reasonerState = ReasonerState.KB_NOT_LOADED; + this.vLog.stop(); + LOGGER.info("Reasoner has been reset. All inferences computed during reasoning have been discarded."); + } + + @Override + public void close() { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.info("Reasoner is already closed."); + } else { + this.reasonerState = ReasonerState.CLOSED; + this.knowledgeBase.deleteListener(this); + this.vLog.stop(); + LOGGER.info("Reasoner closed."); + } + } + + @Override + public void setLogLevel(LogLevel logLevel) { + validateNotClosed(); + Validate.notNull(logLevel, "Log level cannot be null!"); + this.internalLogLevel = logLevel; + this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); + } + + @Override + public LogLevel getLogLevel() { + return this.internalLogLevel; + } + + @Override + public void setLogFile(String filePath) { + validateNotClosed(); + this.vLog.setLogFile(filePath); + } + + @Override + public boolean isJA() { + return checkAcyclicity(AcyclicityNotion.JA); + } + + @Override + public boolean isRJA() { + return checkAcyclicity(AcyclicityNotion.RJA); + } + + @Override + public boolean isMFA() { + return checkAcyclicity(AcyclicityNotion.MFA); + } + + @Override + public boolean isRMFA() { + return checkAcyclicity(AcyclicityNotion.RMFA); + } + + @Override + public boolean isMFC() { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Checking rules acyclicity is not allowed before loading!"); + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic("MFC"); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.CYCLIC); + } + + @Override + 
public CyclicityResult checkForCycles() { + final boolean acyclic = isJA() || isRJA() || isMFA() || isRMFA(); + if (acyclic) { + return CyclicityResult.ACYCLIC; + } else { + final boolean cyclic = isMFC(); + if (cyclic) { + return CyclicityResult.CYCLIC; + } + return CyclicityResult.UNDETERMINED; + } + } + + @Override + public void onStatementsAdded(List statementsAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementsAdded(statementsAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementAdded(Statement statementAdded) { + // TODO more elaborate materialisation state handling + + updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementAdded(statementAdded); + updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementRemoved(Statement statementRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + @Override + public void onStatementsRemoved(List statementsRemoved) { + updateReasonerToKnowledgeBaseChanged(); + updateCorrectnessOnStatementsRemoved(); + } + + Set getKnolwedgeBasePredicates() { + final Set toBeQueriedHeadPredicates = new HashSet<>(); + for (final Rule rule : this.knowledgeBase.getRules()) { + for (final Literal literal : rule.getHead()) { + toBeQueriedHeadPredicates.add(literal.getPredicate()); + } + } + for (final DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { + toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); + } + for (final Fact fact : this.knowledgeBase.getFacts()) { + toBeQueriedHeadPredicates.add(fact.getPredicate()); + } + return toBeQueriedHeadPredicates; + } + + private PositiveLiteral getQueryAtom(final Predicate predicate) { + final List toBeGroundedVariables = new ArrayList<>(predicate.getArity()); + for (int i = 0; i < predicate.getArity(); i++) { + 
toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); + } + return Expressions.makePositiveLiteral(predicate, toBeGroundedVariables); + } + + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + load(); + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + throw new RuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (final NotStartedException e) { + throw new RuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); + } + + private void updateReasonerToKnowledgeBaseChanged() { + if (this.reasonerState.equals(ReasonerState.KB_LOADED) + || this.reasonerState.equals(ReasonerState.MATERIALISED)) { + + this.reasonerState = ReasonerState.KB_CHANGED; + } + } + + private void updateCorrectnessOnStatementsAdded() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + private void updateCorrectnessOnStatementsRemoved() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + /** + * Check if reasoner is closed and throw an exception if it is. 
+ * + * @throws ReasonerStateException + */ + void validateNotClosed() throws ReasonerStateException { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.error("Invalid operation requested on a closed reasoner object!"); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); + } + } + + ReasonerState getReasonerState() { + return this.reasonerState; + } + + void setReasonerState(ReasonerState reasonerState) { + this.reasonerState = reasonerState; + } +} From bba8681aa401f5691df7b5fd75aecd8158f9aded Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 10 Feb 2020 20:45:10 +0100 Subject: [PATCH 0787/1255] Parser: Track prefix declarations as part of the knowledge base --- .../core/model/api/PrefixDeclarations.java | 13 +- .../MergeablePrefixDeclarations.java | 178 ++++++++++++++++++ .../vlog4j/core/reasoner/KnowledgeBase.java | 46 +++-- .../parser/LocalPrefixDeclarations.java | 12 +- .../semanticweb/vlog4j/parser/RuleParser.java | 4 +- .../ImportFileRelativeDirectiveHandler.java | 2 +- 6 files changed, 228 insertions(+), 27 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java index 857d81160..942c10a60 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,10 +25,10 @@ /** * Registry that manages prefixes and base namespace declarations as used for * parsing and serialising inputs. - * + * * @author Markus Kroetzsch */ -public interface PrefixDeclarations { +public interface PrefixDeclarations extends Iterable { static final String XSD = "http://www.w3.org/2001/XMLSchema#"; static final String XSD_STRING = "http://www.w3.org/2001/XMLSchema#string"; @@ -42,7 +42,7 @@ public interface PrefixDeclarations { /** * Returns the relevant base namespace. This should always return a result, * possibly using a local default value if no base was declared. - * + * * @return string of an absolute base IRI */ String getBase(); @@ -50,7 +50,7 @@ public interface PrefixDeclarations { /** * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. 
- * + * * @param base the new base namespace * @throws PrefixDeclarationException if base was already defined */ @@ -63,5 +63,4 @@ public interface PrefixDeclarations { String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; String absolutize(String prefixedName) throws PrefixDeclarationException; - } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java new file mode 100644 index 000000000..f586fa9ff --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -0,0 +1,178 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URI; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; + +/** + * Implementation of {@link PrefixDeclarations} that is suitable for + * incrementally parsing from multiple sources. 
When trying to merge in + * conflicting prefix declarations, a fresh non-conflicting prefix is generated + * instead. + * + * @author Maximilian Marx + */ +final public class MergeablePrefixDeclarations implements PrefixDeclarations { + private Map prefixes = new HashMap<>(); + + private String baseUri = EMPTY_BASE_PREFIX; + private long nextIndex = 0; + + private static final String EMPTY_BASE_PREFIX = ""; + private static final String GENERATED_PREFIX_PREFIX = "vlog4j_generated_"; + + public MergeablePrefixDeclarations() { + } + + public MergeablePrefixDeclarations(final PrefixDeclarations prefixDeclarations) { + super(); + mergePrefixDeclarations(prefixDeclarations); + } + + @Override + public String getBase() { + return baseUri; + } + + @Override + public void setBase(String base) { + if (base != this.baseUri && this.baseUri != EMPTY_BASE_PREFIX) { + prefixes.put(getFreshPrefix(), this.baseUri); + } + + this.baseUri = base; + } + + @Override + public String getPrefix(String prefix) throws PrefixDeclarationException { + if (!prefixes.containsKey(prefix)) { + throw new PrefixDeclarationException("Prefix \"" + prefix + "\" cannot be resolved (not declared yet)."); + } + return prefixes.get(prefix); + } + + @Override + public void setPrefix(String prefix, String iri) { + String prefixName = prefixes.containsKey(prefix) ? getFreshPrefix() : prefix; + prefixes.put(prefixName, iri); + } + + @Override + public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { + int colon = prefixedName.indexOf(":"); + String prefix = prefixedName.substring(0, colon + 1); + String suffix = prefixedName.substring(colon + 1); + + return getPrefix(prefix) + suffix; + } + + /** + * Turn an absolute Iri into a (possibly) prefixed name. Dual to + * {@link resolvePrefixedName}. + * + * @param iri an absolute Iri to abbreviate. + * + * @return an abbreviated form of {@code iri} if an appropriate prefix is known, + * or {@code iri}. 
+ */ + public String unresolveAbsoluteIri(String iri) { + Map matches = new HashMap<>(); + + prefixes.forEach((prefixName, baseIri) -> { + if (iri.startsWith(baseIri)) { + matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); + } + }); + + List matchesByLength = new ArrayList<>(matches.keySet()); + matchesByLength.sort((left, right) -> { + // inverse order, so we get the longest match first + return matches.get(right).compareTo(matches.get(left)); + }); + + if (matchesByLength.size() > 0) { + return matchesByLength.get(0); + } else { + // no matching prefix + return iri; + } + } + + @Override + public String absolutize(String iri) throws PrefixDeclarationException { + URI relative = URI.create(iri); + + if (relative.isAbsolute()) { + return iri; + } else { + return getBase() + iri; + } + } + + @Override + public Iterator iterator() { + return this.prefixes.keySet().iterator(); + } + + /** + * Merge another set of prefix declarations. + * + * @param other the set of prefix declarations to merge. Conflicting prefixes + * will be renamed. + * + * @return this + */ + public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarations other) { + for (String prefixName : other) { + String iri; + try { + iri = other.getPrefix(prefixName); + } catch (PrefixDeclarationException e) { + // this shouldn't throw, since we already know that prefix is defined. 
+ throw new RuntimeException(e); + } + + this.prefixes.put(prefixName, iri); + } + + return this; + } + + private String getFreshPrefix() { + for (long idx = nextIndex; true; ++idx) { + String freshPrefix = GENERATED_PREFIX_PREFIX + idx; + + if (!prefixes.containsKey(freshPrefix)) { + this.nextIndex = idx + 1; + return freshPrefix; + } + } + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 3198deafa..06915b4b1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -25,6 +25,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; /*- * #%L @@ -168,13 +169,12 @@ public Void visit(final DataSourceDeclaration statement) { */ private final LinkedHashSet statements = new LinkedHashSet<>(); -// TODO support prefixes -// /** -// * Known prefixes that can be used to pretty-print the contents of the knowledge -// * base. We try to preserve user-provided prefixes found in files when loading -// * data. -// */ -// PrefixDeclarations prefixDeclarations; + /** + * Known prefixes that can be used to pretty-print the contents of the knowledge + * base. We try to preserve user-provided prefixes found in files when loading + * data. + */ + private MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); /** * Index structure that organises all facts by their predicate. @@ -446,17 +446,19 @@ Map> getFactsByPredicate() { /** * Import rules from a file. 
* - * @param file the file to import - * @param parseFunction a function that transforms a {@link KnowledgeBase} using the {@link InputStream}. + * @param file the file to import + * @param parseFunction a function that transforms a {@link KnowledgeBase} using + * the {@link InputStream}. * - * @throws IOException when reading {@code file} fails - * @throws IllegalArgumentException when {@code file} is null or has already been imported - * @throws RuntimeException when parseFunction throws + * @throws IOException when reading {@code file} fails + * @throws IllegalArgumentException when {@code file} is null or has already + * been imported + * @throws RuntimeException when parseFunction throws * + * @return this */ - public KnowledgeBase importRulesFile(File file, - BiFunction parseFunction) - throws RuntimeException, IOException, IllegalArgumentException { + public KnowledgeBase importRulesFile(File file, BiFunction parseFunction) + throws RuntimeException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); @@ -466,4 +468,18 @@ public KnowledgeBase importRulesFile(File file, return parseFunction.apply(stream, this); } } + + /** + * Merge {@link PrefixDeclarations} into this knowledge base. 
+ * + */ + public KnowledgeBase mergePrefixDeclarations(PrefixDeclarations prefixDeclarations) { + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + + return this; + } + + public PrefixDeclarations getPrefixDeclarations() { + return this.prefixDeclarations; + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index bf6c10f36..4f325de6e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +22,7 @@ import java.net.URI; import java.util.HashMap; +import java.util.Iterator; import java.util.Map; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -31,7 +32,7 @@ * Implementation of {@link PrefixDeclarations} that is used when parsing data * from a single source. In this case, attempts to re-declare prefixes or the * base IRI will lead to errors. 
- * + * * @author Markus Kroetzsch * */ @@ -92,4 +93,9 @@ public String absolutize(String iri) throws PrefixDeclarationException { } } + @Override + public Iterator iterator() { + return this.prefixes.keySet().iterator(); + } + } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index d1395001e..93e81cb9f 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -262,7 +262,9 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException LOGGER.error("Exception while parsing Knowledge Base!", e); throw new ParsingException("Exception while parsing Knowledge Base.", e); } - return parser.getKnowledgeBase(); + + KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); + return knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarations()); } protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 9036699b7..ca363c251 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -54,7 +54,7 @@ public KnowledgeBase handleDirective(List arguments, final Su try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { try { - RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); } catch (ParsingException e) { throw new 
RuntimeException(e); } From 22aa3fc11dbd4e8e67f4a30fbb457329b6333d09 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 13:59:36 +0100 Subject: [PATCH 0788/1255] Core: Add license headers to VLogKnowledgeBase --- .../vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java | 1 - 1 file changed, 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java index cc192cce2..e121399a9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java @@ -150,7 +150,6 @@ Set getRules() { } /** - * * Local visitor implementation for processing statements upon loading. Internal * index structures are updated based on the statements that are detected. * From cd2407d2336b1d3f100e43e7c248b5791216b37d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:08:35 +0100 Subject: [PATCH 0789/1255] Graal: Fix Javadoc Graal: Fix style --- .../vlog4j/graal/GraalToVLog4JModelConverter.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java index fe44ed0b9..5b93b173a 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -93,8 +93,8 @@ public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom * Atoms} into a {@link List} of {@link PositiveLiteral VLog4J * PositiveLiterals}. * - * @param literals list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal - * Atoms}. + * @param atoms list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms}. * @return A {@link List} of {@link PositiveLiteral VLog4J PositiveLiterals}. */ public static List convertAtoms(final List atoms) { @@ -109,8 +109,8 @@ public static List convertAtoms(final List convertAtomsToFacts(final List atoms) { @@ -254,7 +254,7 @@ public static List convertRules(final List"a". Graal Constant with identifier * "c" will be transformed to vlog4j Constant with name * "<c>". - * + * * @throws GraalConvertException If the term is neither variable nor constant. 
*/ private static Term convertTerm(final fr.lirmm.graphik.graal.api.core.Term term, From 36b37fe21467e2c3d603b1d85ff4bbc5eea3296b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:08:41 +0100 Subject: [PATCH 0790/1255] Parser: Fix Javadoc Parser: Fix javadoc Parser: Fix style --- .../parser/DataSourceDeclarationHandler.java | 10 ++-- .../parser/DatatypeConstantHandler.java | 5 +- .../vlog4j/parser/DirectiveArgument.java | 14 ++--- .../vlog4j/parser/DirectiveHandler.java | 6 +-- .../vlog4j/parser/ParserConfiguration.java | 25 +++++---- .../ImportFileRelativeDirectiveHandler.java | 2 - .../parser/javacc/JavaCCParserBase.java | 54 ++++++++++++------- .../parser/javacc/SubParserFactory.java | 11 ++-- .../vlog4j/syntax/parser/EntityTest.java | 5 +- .../RuleParserConfigurableLiteralTest.java | 17 +++--- .../parser/RuleParserDataSourceTest.java | 11 ++-- .../vlog4j/syntax/parser/RuleParserTest.java | 9 +--- 12 files changed, 88 insertions(+), 81 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java index 4ec871f68..bbc6a359c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * VLog4j Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 VLog4j Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,8 +20,6 @@ * #L% */ - -import org.semanticweb.vlog4j.parser.DirectiveHandler; import org.semanticweb.vlog4j.core.model.api.DataSource; /** diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java index c584b876a..eec3b4fcd 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java @@ -34,8 +34,9 @@ public interface DatatypeConstantHandler { * * @param lexicalForm lexical representation of the constant. * - * @throws ParsingException when the given representation is invalid for this datatype. - + * @throws ParsingException when the given representation is invalid for this + * datatype. + * * @return */ public DatatypeConstant createConstant(String lexicalForm) throws ParsingException; diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java index d7fc50b95..f9e71f5e6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java @@ -39,9 +39,9 @@ private DirectiveArgument() { /** * Apply a function to the contained value. 
* - * @argument stringHandler the function to apply to a string argument - * @argument iriHandler the function to apply to an IRI - * @argument termHandler the function to apply to a Term + * @param stringHandler the function to apply to a string argument + * @param iriHandler the function to apply to an IRI + * @param termHandler the function to apply to a Term * * @return the value returned by the appropriate handler function */ @@ -51,7 +51,7 @@ public abstract V apply(Function stringHandler, /** * Partially compare two arguments, without comparing the actual values. * - * @argument other the Object to compare to. + * @param other the Object to compare to. * * @return An {@link Optional} containing true if the arguments are surely * equal, containing false if the arguments are not equal, or an empty @@ -77,7 +77,7 @@ protected Optional isEqual(Object other) { /** * Create an argument containing a String. * - * @argument value the string value + * @param value the string value * * @return An argument containing the given string value */ @@ -111,7 +111,7 @@ public int hashCode() { /** * Create an argument containing a IRI. * - * @argument value the IRI value + * @param value the IRI value * * @return An argument containing the given IRI value */ @@ -145,7 +145,7 @@ public int hashCode() { /** * Create an argument containing a Term. 
* - * @argument value the Term value + * @param value the Term value * * @return An argument containing the given Term value */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 2a8c4a070..678bafca4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -160,7 +160,7 @@ public static Term validateTermArgument(final DirectiveArgument argument, final /** * Obtain a {@link KnowledgeBase} from a {@link SubParserFactory}. * - * @argument subParserFactory the SubParserFactory. + * @param subParserFactory the SubParserFactory. * * @return the knowledge base. */ @@ -173,7 +173,7 @@ default KnowledgeBase getKnowledgeBase(SubParserFactory subParserFactory) { /** * Obtain a {@link ParserConfiguration} from a {@link SubParserFactory}. * - * @argument subParserFactory the SubParserFactory. + * @param subParserFactory the SubParserFactory. * * @return the parser configuration. */ @@ -186,7 +186,7 @@ default ParserConfiguration getParserConfiguration(SubParserFactory subParserFac /** * Obtain {@link PrefixDeclarations} from a {@link SubParserFactory}. * - * @argument subParserFactory the SubParserFactory. + * @param subParserFactory the SubParserFactory. * * @return the prefix declarations. 
*/ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 5f59f75da..8b766bf66 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -20,8 +20,8 @@ * #L% */ -import java.util.HashMap; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import org.apache.commons.lang3.Validate; @@ -127,8 +127,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin * Parse a constant with optional data type. * * @param lexicalForm the (unescaped) lexical form of the constant. - * @param languageTag the language tag, or null if not present. - * @param the datatype, or null if not present. + * @param datatype the datatype, or null if not present. * * @throws ParsingException when the lexical form is invalid for the given data * type. @@ -203,8 +202,8 @@ public ParserConfiguration registerDatatype(final String name, final DatatypeCon /** * Register a custom literal handler. * - * @argument delimiter the delimiter to handle. - * @argument handler the handler for this literal type. + * @param delimiter the delimiter to handle. + * @param handler the handler for this literal type. * * @throws IllegalArgumentException when the literal delimiter has already been * registered. @@ -223,8 +222,8 @@ public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimite /** * Register a directive. * - * @argument name the name of the directive. - * @argument handler the handler for this directive. + * @param name the name of the directive. + * @param handler the handler for this directive. 
* * @throws IllegalArgumentException when the directive name has already been * registered, or is a reserved name (i.e., one @@ -246,16 +245,16 @@ public ParserConfiguration registerDirective(String name, DirectiveHandler arguments, SubParserFactory subParserFactory) - throws ParsingException { + public KnowledgeBase parseDirectiveStatement(String name, List arguments, + SubParserFactory subParserFactory) throws ParsingException { final DirectiveHandler handler = this.directives.get(name); if (handler == null) { @@ -267,9 +266,9 @@ public KnowledgeBase parseDirectiveStatement(String name, List= s.length() - 1) - throw new ParseException("Illegal escape at end of string, line:" + line + ", column: " + column); + throw new ParseException("Illegal escape at end of string, line: " + line + ", column: " + column); char ch2 = s.charAt(i + 1); column = column + 1; i = i + 1; @@ -222,32 +226,46 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i // Not just codepoints. Must be a legal escape. 
char ch3 = 0; switch (ch2) { - case 'n': + case 'n': + ch3 = '\n'; - break; + break; + case 't': + ch3 = '\t'; - break; + break; + case 'r': + ch3 = '\r'; - break; + break; + case 'b': + ch3 = '\b'; - break; + break; + case 'f': + ch3 = '\f'; - break; + break; + case '\'': - ch3 = '\''; - break; + ch3 = '\''; + break; + case '\"': - ch3 = '\"'; - break; + ch3 = '\"'; + break; + case '\\': - ch3 = '\\'; - break; + ch3 = '\\'; + break; + default: - throw new ParseException("Unknown escape: \\" + ch2 + ", line:" + line + ", column: " + column); + + throw new ParseException("Unknown escape: \\" + ch2 + ", line: " + line + ", column: " + column); } sb.append(ch3); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index c92c4be83..75019ef77 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -30,10 +30,10 @@ import org.semanticweb.vlog4j.parser.RuleParser; /** - * Factory for creating a SubParser sharing configuration, (semantic) - * state, and prefixes, but with an independent input stream, to be - * used, e.g., for parsing arguments in data source declarations. The - * parser will start in the {@code DEFAULT} lexical state. + * Factory for creating a SubParser sharing configuration, (semantic) state, and + * prefixes, but with an independent input stream, to be used, e.g., for parsing + * arguments in data source declarations. The parser will start in the + * {@code DEFAULT} lexical state. * * @author Maximilian Marx */ @@ -45,8 +45,7 @@ public class SubParserFactory { /** * Construct a SubParserFactory. * - * @param parser the parser instance to get the (semantic) state - * from. + * @param parser the parser instance to get the (semantic) state from. 
*/ SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java index 3ca6b90d2..c1b13d3b0 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java @@ -19,7 +19,8 @@ * limitations under the License. * #L% */ -import static org.junit.Assert.assertEquals; + +import static org.junit.Assert.*; import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.Conjunction; @@ -36,8 +37,6 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; public class EntityTest { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java index 09770733d..12066069c 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java @@ -248,7 +248,7 @@ public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws Parsing Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( - Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset], [tst]"))); + Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset], 
[tst]"))); assertEquals(expected, constants); } @@ -256,11 +256,12 @@ public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws Parsing public void parseLiteral_mixedAndNestedLiterals_succeeds() throws ParsingException { ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) - .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) - .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) - .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler) - .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); - Literal result = RuleParser.parseLiteral("p(|{}|, #test#, [|test, #test#, test|], ([], {}, [{[{}]}]))", parserConfiguration); + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p(|{}|, #test#, [|test, #test#, test|], ([], {}, [{[{}]}]))", + parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); List expected = new ArrayList<>( Arrays.asList(pipeConstant, hashConstant, bracketConstant, parenConstant)); @@ -272,8 +273,8 @@ static Constant makeReversedConstant(String name) { return Expressions.makeAbstractConstant(builder.reverse().toString()); } - static ConfigurableLiteralHandler reversingHandler = - (String syntacticForm, SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm); + static ConfigurableLiteralHandler reversingHandler = (String syntacticForm, + SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm); static ConfigurableLiteralHandler getMockLiteralHandler(ConfigurableLiteralDelimiter delimiter, Constant constant) { ConfigurableLiteralHandler handler = 
mock(ConfigurableLiteralHandler.class); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java index 4d045d292..8554ce8e1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java @@ -19,11 +19,9 @@ * limitations under the License. * #L% */ -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.io.File; import java.io.IOException; @@ -150,7 +148,8 @@ public void testCustomDataSource() throws ParsingException { DirectiveArgument.string("world")); RuleParser.parseDataSourceDeclaration(input, parserConfiguration); - verify(handler).handleDirective(eq(expectedArguments), ArgumentMatchers.any()); + verify(handler).handleDirective(ArgumentMatchers.eq(expectedArguments), + ArgumentMatchers.any()); } @Test diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java index 63ab37258..7f44b75a5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java @@ -19,9 +19,8 @@ * limitations under the License. 
* #L% */ -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; import java.util.ArrayList; import java.util.Arrays; @@ -42,10 +41,6 @@ import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; -import org.semanticweb.vlog4j.parser.ParserConfiguration; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; public class RuleParserTest { From 4f1d069a7ce95b267ae6045d75a1bca28432e186 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:19:22 +0100 Subject: [PATCH 0791/1255] Fix source version for javadoc --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index c521ee208..ac95c88c5 100644 --- a/pom.xml +++ b/pom.xml @@ -305,6 +305,7 @@ maven-javadoc-plugin ${maven.javadoc.version} + 1.8 VLog4j homepage]]> From 822f77b61fbf20b912e18f2eda9ba98c842f9350 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:36:08 +0100 Subject: [PATCH 0792/1255] Core: Add KnowledgeBase tests --- .../core/reasoner/KnowledgeBaseTest.java | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index 61a274f23..d58b64d9c 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file 
except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,8 +29,10 @@ import org.junit.Before; import org.junit.Test; import org.mockito.internal.util.collections.Sets; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; public class KnowledgeBaseTest { @@ -93,4 +95,22 @@ public void testDoRemoveStatementInexistentPredicate() { } + @Test + public void getPrefixDeclarations_default_hasEmptyBase() { + assertEquals(this.kb.getPrefixDeclarations().getBase(), ""); + } + + @Test(expected = PrefixDeclarationException.class) + public void getPrefixDeclarations_defaultUndeclaredPrefix_throws() throws PrefixDeclarationException { + this.kb.getPrefixDeclarations().getPrefix("ex"); + } + + @Test + public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationException { + String iri = "https://example.org"; + MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); + prefixDeclarations.setPrefix("ex", iri); + this.kb.mergePrefixDeclarations(prefixDeclarations); + assertEquals(this.kb.getPrefixDeclarations().getPrefix("ex"), iri); + } } From 207e84674dc36e0aa625a62a2c008d91d71f2b95 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 14:36:30 +0100 Subject: [PATCH 0793/1255] Parser: Rename tests to match code package name --- .../org/semanticweb/vlog4j/{syntax => }/parser/EntityTest.java | 2 +- .../vlog4j/{syntax => }/parser/ParserConfigurationTest.java | 2 +- .../{syntax => 
}/parser/RuleParserConfigurableLiteralTest.java | 2 +- .../vlog4j/{syntax => }/parser/RuleParserDataSourceTest.java | 2 +- .../vlog4j/{syntax => }/parser/RuleParserParseFactTest.java | 2 +- .../semanticweb/vlog4j/{syntax => }/parser/RuleParserTest.java | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/EntityTest.java (99%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/ParserConfigurationTest.java (98%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserConfigurableLiteralTest.java (99%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserDataSourceTest.java (99%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserParseFactTest.java (98%) rename vlog4j-parser/src/test/java/org/semanticweb/vlog4j/{syntax => }/parser/RuleParserTest.java (99%) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/EntityTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/EntityTest.java index c1b13d3b0..64520e4ed 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/EntityTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/EntityTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java similarity index 98% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java rename to 
vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java index f89c5f012..e6f83e8f1 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java index 12066069c..c69bceae6 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserConfigurableLiteralTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java index 8554ce8e1..c42975d90 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package 
org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java similarity index 98% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index 0d0bd03be..f065eed5e 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java similarity index 99% rename from vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java rename to vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 7f44b75a5..eae805c6a 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/syntax/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.syntax.parser; +package org.semanticweb.vlog4j.parser; /*- * #%L From 7b9a6d9f5bf917744d735c22d8ac9b7d0c6b5224 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 17:10:50 +0100 Subject: [PATCH 0794/1255] Parser: Improve test coverage --- .../PrefixDeclarationException.java | 12 ++- .../vlog4j/parser/DirectiveArgument.java | 14 +-- .../vlog4j/parser/DirectiveHandler.java | 4 +- .../parser/LocalPrefixDeclarations.java | 10 +- ...eryResultDataSourceDeclarationHandler.java | 13 ++- .../vlog4j/parser/javacc/JavaCCParser.jj | 10 +- 
.../parser/javacc/JavaCCParserBase.java | 14 +-- .../vlog4j/parser/DirectiveHandlerTest.java | 96 +++++++++++++++++ .../parser/ParserConfigurationTest.java | 80 ++++++++++++-- .../vlog4j/parser/RuleParserTest.java | 7 ++ .../parser/javacc/JavaCCParserBaseTest.java | 102 ++++++++++++++++++ 11 files changed, 325 insertions(+), 37 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index f3806c21d..7c209c5cf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,12 +21,16 @@ */ public class PrefixDeclarationException extends VLog4jException { - /** - * + /** + * */ private static final long serialVersionUID = 1L; public PrefixDeclarationException(String errorMessage) { super(errorMessage); } + + public PrefixDeclarationException(String errorMessage, Throwable cause) { + super(errorMessage, cause); + } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java index f9e71f5e6..51190723e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java @@ -20,7 +20,7 @@ * #L% */ -import java.net.URL; +import java.net.URI; import java.util.Optional; import java.util.function.Function; @@ -46,7 +46,7 @@ private DirectiveArgument() { * @return the value returned by the appropriate handler function */ public abstract V apply(Function stringHandler, - Function iriHandler, Function termHandler); + Function iriHandler, Function termHandler); /** * Partially compare two arguments, without comparing the actual values. 
@@ -85,7 +85,7 @@ public static DirectiveArgument string(String value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler) { return stringHandler.apply(value); } @@ -115,11 +115,11 @@ public int hashCode() { * * @return An argument containing the given IRI value */ - public static DirectiveArgument iri(URL value) { + public static DirectiveArgument iri(URI value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler) { return iriHandler.apply(value); } @@ -153,7 +153,7 @@ public static DirectiveArgument term(Term value) { return new DirectiveArgument() { @Override public V apply(Function stringHandler, - Function iriHandler, Function termHandler) { + Function iriHandler, Function termHandler) { return termHandler.apply(value); } @@ -192,7 +192,7 @@ public Optional fromString() { * @return An optional containing the contained IRI, or an empty Optional if the * argument doesn't contain a IRI. */ - public Optional fromIri() { + public Optional fromIri() { return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty()); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index 678bafca4..c97cf0591 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -22,7 +22,7 @@ import java.io.File; import java.io.IOException; -import java.net.URL; +import java.net.URI; import java.util.List; import java.util.NoSuchElementException; @@ -128,7 +128,7 @@ public static File validateFilenameArgument(final DirectiveArgument argument, fi * * @return the contained IRI. 
*/ - public static URL validateIriArgument(final DirectiveArgument argument, final String description) + public static URI validateIriArgument(final DirectiveArgument argument, final String description) throws ParsingException { try { return argument.fromIri().get(); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index 4f325de6e..6c234806e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -21,6 +21,7 @@ */ import java.net.URI; +import java.net.URISyntaxException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -85,7 +86,14 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE } public String absolutize(String iri) throws PrefixDeclarationException { - URI relative = URI.create(iri); + URI relative; + + try { + relative = new URI(iri); + } catch (URISyntaxException e) { + throw new PrefixDeclarationException("Failed to parse IRI", e); + } + if (relative.isAbsolute()) { return iri; } else { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index ff178435c..036036fb7 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -1,5 +1,7 @@ package org.semanticweb.vlog4j.parser.datasources; +import java.net.MalformedURLException; + /*- * #%L * VLog4j Parser @@ -20,6 +22,7 @@ * #L% */ +import java.net.URI; import java.net.URL; 
import java.util.List; @@ -41,10 +44,16 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 3); - URL endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); + URI endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); - return new SparqlQueryResultDataSource(endpoint, variables, query); + URL endpointURL; + try { + endpointURL = endpoint.toURL(); + } catch (MalformedURLException e) { + throw new ParsingException("URI \"" + endpoint + "\" is not a valid URL", e); + } + return new SparqlQueryResultDataSource(endpointURL, variables, query); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 7024f63c3..5cbd83f81 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -17,8 +17,8 @@ package org.semanticweb.vlog4j.parser.javacc; import java.io.File; import java.io.InputStream; import java.io.IOException; -import java.net.URL; -import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; import java.util.List; import java.util.Deque; @@ -411,10 +411,10 @@ LinkedList< DirectiveArgument > Arguments() throws PrefixDeclarationException : } { ( LOOKAHEAD(String()) str = String() { argument = DirectiveArgument.string(str); } | LOOKAHEAD(absoluteIri()) str = absoluteIri() { - URL url; + URI url; try { - url = new URL(str); - } catch 
(MalformedURLException e) { + url = new URI(str); + } catch (URISyntaxException e) { throw makeParseExceptionWithCause("Error parsing IRIhandler: " + e.getMessage(), e); } argument = DirectiveArgument.iri(url); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 43b42f58c..ed2f353ba 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -151,8 +151,8 @@ NamedNull createNamedNull(String lexicalForm) { /** * Creates a suitable {@link Constant} from the parsed data. * - * @param string the string data (unescaped) - * @param datatype the datatype, or null if not provided + * @param string the string data (unescaped) + * @param datatype the datatype, or null if not provided * @return suitable constant */ Constant createConstant(String lexicalForm, String datatype) throws ParseException { @@ -181,10 +181,10 @@ void addDataSource(String predicateName, int arity, DataSource dataSource) throw } static String unescapeStr(String s, int line, int column) throws ParseException { - return unescape(s, '\\', false, line, column); + return unescape(s, '\\', line, column); } - static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) throws ParseException { + static String unescape(String s, char escape, int line, int column) throws ParseException { int i = s.indexOf(escape); if (i == -1) @@ -264,7 +264,6 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i break; default: - throw new ParseException("Unknown escape: \\" + ch2 + ", line: " + line + ", column: " + column); } sb.append(ch3); @@ -273,7 +272,7 @@ static String unescape(String s, char escape, boolean pointCodeOnly, int line, i } /** - * Remove the first and last {@code n} 
characters from string {@code s} + * Remove the first and last {@code n} characters from string {@code s} * * @param s string to strip delimiters from * @param n number of characters to strip from both ends @@ -352,7 +351,8 @@ Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syn return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); } - KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) throws ParseException { + KnowledgeBase parseDirectiveStatement(String name, List arguments, + SubParserFactory subParserFactory) throws ParseException { try { return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); } catch (ParsingException e) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java new file mode 100644 index 000000000..eab233671 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java @@ -0,0 +1,96 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.net.URI; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + +public class DirectiveHandlerTest { + private static final String STRING = "src/test/resources/facts.rls"; + private static final URI IRI = URI.create("https://example.org"); + private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + + private static final DirectiveArgument STRING_ARGUMENT = DirectiveArgument.string(STRING); + private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); + private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + + @Test + public void validateStringArgument_stringArgument_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateStringArgument(STRING_ARGUMENT, "string argument"), STRING); + } + + @Test(expected = ParsingException.class) + public void validateStringArgument_iriArgument_throws() throws ParsingException { + DirectiveHandler.validateStringArgument(IRI_ARGUMENT, "string argument"); + } + + @Test(expected = ParsingException.class) + public void validateStringArgument_termArgument_throws() throws ParsingException { + DirectiveHandler.validateStringArgument(TERM_ARGUMENT, "string argument"); + } + + @Test + public void validateIriArgument_iriArgument_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateIriArgument(IRI_ARGUMENT, "iri argument"), IRI); + } + + @Test(expected = ParsingException.class) + public void validateIriArgument_StringArgument_throws() throws ParsingException { + DirectiveHandler.validateIriArgument(STRING_ARGUMENT, "iri argument"); + } + + @Test(expected = ParsingException.class) + public void validateIriArgument_termArgument_throws() throws ParsingException { + DirectiveHandler.validateIriArgument(TERM_ARGUMENT, "iri argument"); + } + + @Test + public 
void validateTermArgument_termArgument_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateTermArgument(TERM_ARGUMENT, "term argument"), TERM); + } + + @Test(expected = ParsingException.class) + public void validateTermArgument_stringArgument_throws() throws ParsingException { + DirectiveHandler.validateTermArgument(STRING_ARGUMENT, "term argument"); + } + + @Test(expected = ParsingException.class) + public void validateTermArgument_iriArgument_throws() throws ParsingException { + DirectiveHandler.validateTermArgument(IRI_ARGUMENT, "term argument"); + } + + @Test + public void validateFilenameArgument_filename_succeeds() throws ParsingException { + assertEquals(DirectiveHandler.validateFilenameArgument(STRING_ARGUMENT, "filename argument").getPath(), STRING); + } + + @Test + public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { + DirectiveHandler.validateFilenameArgument(DirectiveArgument.string(STRING + "-nonexistant"), + "filename argument"); + } + +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java index e6f83e8f1..e78a42ca5 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java @@ -20,47 +20,109 @@ * #L% */ -import static org.mockito.Mockito.*; +import static org.junit.Assert.*; +import java.util.ArrayList; + +import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DataSourceDeclarationHandler; import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; import org.semanticweb.vlog4j.parser.ParserConfiguration; -import org.semanticweb.vlog4j.parser.ParsingException; +import 
org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; public class ParserConfigurationTest { private static final String TYPE_NAME = "test-type"; private static final String SOURCE_NAME = "test-source"; + private static final String DIRECTIVE_NAME = "test-directive"; + + private ParserConfiguration parserConfiguration; - private final DatatypeConstantHandler datatypeConstantHandler = mock(DatatypeConstantHandler.class); - private final DataSourceDeclarationHandler dataSourceDeclarationHandler = mock(DataSourceDeclarationHandler.class); + @Mock + private DatatypeConstantHandler datatypeConstantHandler; + @Mock + private DataSourceDeclarationHandler dataSourceDeclarationHandler; + @Mock + private SubParserFactory subParserFactory; + @Mock + private DirectiveHandler directiveHandler; + + @Before + public void init() { + parserConfiguration = new ParserConfiguration(); + } @Test(expected = IllegalArgumentException.class) public void registerDataSource_duplicateName_throws() { - ParserConfiguration parserConfiguration = new ParserConfiguration(); - parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler) .registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler); } @Test(expected = IllegalArgumentException.class) public void registerDatatype_duplicateName_throws() { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDatatype(TYPE_NAME, datatypeConstantHandler); } @Test public void registerDataSource_datatypeName_succeeds() { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDataSource(TYPE_NAME, dataSourceDeclarationHandler); } @Test public void registerDatatype_dataSourceName_succeeds() { - ParserConfiguration parserConfiguration = new 
ParserConfiguration(); parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler).registerDatatype(SOURCE_NAME, datatypeConstantHandler); } + + @Test + public void isParsingOfNamedNullsAllowed_default_returnsFalse() { + assertFalse("named nulls are disallowed by default", parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test + public void isParsingOfNamedNullsAllowed_enabled_returnsTrue() { + parserConfiguration.allowNamedNulls(); + assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test + public void isParsingOfNamedNullsAllowed_enabledAndDisabled_returnsFalse() { + parserConfiguration.allowNamedNulls(); + assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + parserConfiguration.disallowNamedNulls(); + assertFalse("named nulls are disallowed after disallowing them", + parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test(expected = ParsingException.class) + public void parseConfigurableLiteral_unregisteredLiteral_throws() throws ParsingException { + parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, "test", subParserFactory); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDirective_reservedName_throws() throws IllegalArgumentException { + parserConfiguration.registerDirective("base", directiveHandler); + } + + @Test + public void registerDirective_unreserverdName_succeeds() throws IllegalArgumentException { + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDirective_duplicateName_throws() throws IllegalArgumentException { + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + } + + @Test(expected = ParsingException.class) + public 
void parseDirectiveStatement_unregisteredDirective_throws() throws ParsingException { + parserConfiguration.parseDirectiveStatement(DIRECTIVE_NAME, new ArrayList<>(), subParserFactory); + } + } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index eae805c6a..78bb44d6d 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -461,4 +461,11 @@ public void parseInto_duplicateImportStatements_throws() throws ParsingException KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } + + @Test(expected = ParsingException.class) + public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingException { + String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + RuleParser.parseInto(knowledgeBase, input); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java new file mode 100644 index 000000000..045cf373a --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBaseTest.java @@ -0,0 +1,102 @@ +package org.semanticweb.vlog4j.parser.javacc; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.parser.DatatypeConstantHandler; +import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; +import org.semanticweb.vlog4j.parser.ParserConfiguration; +import org.semanticweb.vlog4j.parser.ParsingException; + +public class JavaCCParserBaseTest { + private JavaCCParserBase parserBase; + private static final String DATATYPE_NAME = "https://example.org/test-type"; + + private DatatypeConstantHandler datatypeConstantHandler = mock(DatatypeConstantHandler.class); + + @Before + public void init() { + parserBase = new JavaCCParserBase(); + } + + @Rule + public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void createConstant_undeclaredPrefix_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Failed to parse IRI"); + parserBase.createConstant("ïnvälid://test"); + } + + @Test + public void createConstant_throwingDatatypeConstantHandler_throws() throws ParseException, ParsingException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Failed to parse Constant"); + + when(datatypeConstantHandler.createConstant(anyString())).thenThrow(ParsingException.class); + ParserConfiguration parserConfiguration = new 
DefaultParserConfiguration().registerDatatype(DATATYPE_NAME, + datatypeConstantHandler); + parserBase.setParserConfiguration(parserConfiguration); + parserBase.createConstant("test", DATATYPE_NAME); + } + + @Test + public void unescapeStr_escapeChars_succeeds() throws ParseException { + String input = "\\\\test\r\ntest: \\n\\t\\r\\b\\f\\'\\\"\\\\"; + String expected = "\\test\r\ntest: \n\t\r\b\f\'\"\\"; + String result = JavaCCParserBase.unescapeStr(input, 0, 0); + assertEquals(result, expected); + } + + @Test + public void unescapeStr_illegalEscapeAtEndOfString_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Illegal escape at end of string"); + + JavaCCParserBase.unescapeStr("\\", 0, 0); + } + + @Test + public void unescapeStr_unknownEscapeSequence_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Unknown escape"); + + JavaCCParserBase.unescapeStr("\\y", 0, 0); + } + + @Test + public void setBase_changingBase_throws() throws PrefixDeclarationException { + exceptionRule.expect(PrefixDeclarationException.class); + exceptionRule.expectMessage("Base is already defined as"); + + parserBase.setBase("https://example.org/"); + parserBase.setBase("https://example.com/"); + } +} From e6d02dc7b6b0d9ce800cab0df71b86afe7882dd7 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 11 Feb 2020 17:44:07 +0100 Subject: [PATCH 0795/1255] Core: Add some tests for MergeablePrefixDeclarations --- .../MergeablePrefixDeclarations.java | 5 +- .../MergeablePrefixDeclarationsTest.java | 104 ++++++++++++++++++ 2 files changed, 106 insertions(+), 3 deletions(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index f586fa9ff..050cea6c8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -26,7 +26,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.function.BiFunction; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; @@ -126,7 +125,7 @@ public String unresolveAbsoluteIri(String iri) { } @Override - public String absolutize(String iri) throws PrefixDeclarationException { + public String absolutize(String iri) { URI relative = URI.create(iri); if (relative.isAbsolute()) { @@ -167,7 +166,7 @@ public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarati private String getFreshPrefix() { for (long idx = nextIndex; true; ++idx) { - String freshPrefix = GENERATED_PREFIX_PREFIX + idx; + String freshPrefix = GENERATED_PREFIX_PREFIX + idx + ":"; if (!prefixes.containsKey(freshPrefix)) { this.nextIndex = idx + 1; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java new file mode 100644 index 000000000..ba6fd72a7 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -0,0 +1,104 @@ +package org.semanticweb.vlog4j.core.model; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; + +public class MergeablePrefixDeclarationsTest { + private MergeablePrefixDeclarations prefixDeclarations; + + private static final String BASE = "https://example.org/"; + private static final String MORE_SPECIFIC = BASE + "example/"; + private static final String RELATIVE = "relative/test"; + + + @Before + public void init() { + prefixDeclarations = new MergeablePrefixDeclarations(); + } + + @Test + public void setBase_changingBase_succeeds() { + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.getBase(), BASE); + prefixDeclarations.setBase(MORE_SPECIFIC); + assertEquals(prefixDeclarations.getBase(), MORE_SPECIFIC); + } + + @Test + public void setBase_redeclareSameBase_succeeds() { + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.getBase(), BASE); + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.getBase(), BASE); + } + + @Test + public void absolutize_noBase_identical() { + assertEquals(prefixDeclarations.absolutize(RELATIVE), RELATIVE); + } + + @Test + public void absolutize_base_absoluteIri() { + prefixDeclarations.setBase(BASE); + assertEquals(prefixDeclarations.absolutize(RELATIVE), BASE + RELATIVE); + } + + @Test + public void absolutize_absoluteIri_identical() { + 
assertEquals(prefixDeclarations.absolutize(BASE), BASE); + } + + @Test(expected = PrefixDeclarationException.class) + public void resolvePrefixedName_undeclaredPrefix_throws() throws PrefixDeclarationException { + prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE); + } + + @Test + public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE), BASE + RELATIVE); + } + + @Test + public void setPrefix_redeclarePrefix_succeeds() { + prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + } + + @Test + public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { + String prefix = "vlog4j_generated_"; + prefixDeclarations.setPrefix(prefix + "0:", BASE + "generated/"); + prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + + assertEquals(prefixDeclarations.getPrefix(prefix + "1:"), MORE_SPECIFIC); + } + + +} From cf3a0c61bd6f4c95aeeccad2e369fb45ba3a32d8 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 16:24:01 +0100 Subject: [PATCH 0796/1255] Core: Don't expose PrefixDeclarations in KnowledgeBase --- .../vlog4j/core/reasoner/KnowledgeBase.java | 55 ++++++++++++++++++- .../core/reasoner/KnowledgeBaseTest.java | 26 +++++---- 2 files changed, 68 insertions(+), 13 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 06915b4b1..088ffda91 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -17,6 +17,7 @@ import java.util.function.BiFunction; import org.apache.commons.lang3.Validate; +import 
org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; @@ -472,6 +473,11 @@ public KnowledgeBase importRulesFile(File file, BiFunction Date: Wed, 12 Feb 2020 17:03:18 +0100 Subject: [PATCH 0797/1255] Core: Add more tests for MergeablePrefixDeclarations --- .../MergeablePrefixDeclarations.java | 3 +- .../MergeablePrefixDeclarationsTest.java | 60 +++++++++++++++++++ 2 files changed, 62 insertions(+), 1 deletion(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index 050cea6c8..7e8404060 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -105,7 +105,8 @@ public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); prefixes.forEach((prefixName, baseIri) -> { - if (iri.startsWith(baseIri)) { + // only select proper prefixes here, since `eg:` is not a valid prefixed name. 
+ if (iri.startsWith(baseIri) && !iri.equals(baseIri)) { matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); } }); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index ba6fd72a7..7070655c2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -23,16 +23,21 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; +import java.util.Arrays; + import org.junit.Before; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; public class MergeablePrefixDeclarationsTest { private MergeablePrefixDeclarations prefixDeclarations; private static final String BASE = "https://example.org/"; + private static final String UNRELATED = "https://example.com/"; private static final String MORE_SPECIFIC = BASE + "example/"; + private static final String EVEN_MORE_SPECIFIC = MORE_SPECIFIC + "relative/"; private static final String RELATIVE = "relative/test"; @@ -100,5 +105,60 @@ public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDe assertEquals(prefixDeclarations.getPrefix(prefix + "1:"), MORE_SPECIFIC); } + @Test + public void mergeablePrefixDeclarations_constructor_succeeds() throws PrefixDeclarationException { + this.prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(this.prefixDeclarations); + assertEquals(prefixDeclarations.getPrefix("eg:"), MORE_SPECIFIC); + } + + @Test(expected = RuntimeException.class) + public void 
mergePrefixDeclarations_getPrefixUnexpectedlyThrows_throws() throws PrefixDeclarationException { + PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); + + when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:", "ex:").iterator()); + when(prefixDeclarations.getPrefix(anyString())).thenThrow(PrefixDeclarationException.class); + + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + } + + @Test + public void unresolveAbsoluteIri_default_identical() { + assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + } + + @Test + public void unresolveAbsoluteIri_declaredPrefix_succeeds() { + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + } + + @Test + public void unresolveAbsoluteIri_unrelatedPrefix_identical() { + prefixDeclarations.setPrefix("eg:", UNRELATED); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + } + + @Test + public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { + prefixDeclarations.setPrefix("ex:", UNRELATED); + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + } + + @Test + public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { + prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefix("ex:", MORE_SPECIFIC); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "ex:" + RELATIVE); + prefixDeclarations.setPrefix("er:", EVEN_MORE_SPECIFIC); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "er:test"); + } + @Test + public void unresolveAbsoluteIri_exactPrefixMatch_identical() { + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + } 
} From 0fb67863cf84d76993d7077f7c574776a64003e4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 17:45:38 +0100 Subject: [PATCH 0798/1255] Parser: Move URL validation into DirectiveHandler --- .../vlog4j/parser/DirectiveHandler.java | 23 +++++++++++++++++++ ...eryResultDataSourceDeclarationHandler.java | 17 ++++---------- .../vlog4j/parser/DirectiveHandlerTest.java | 12 +++++++++- 3 files changed, 38 insertions(+), 14 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index c97cf0591..b4c9cb4b3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -22,7 +22,9 @@ import java.io.File; import java.io.IOException; +import java.net.MalformedURLException; import java.net.URI; +import java.net.URL; import java.util.List; import java.util.NoSuchElementException; @@ -137,6 +139,27 @@ public static URI validateIriArgument(final DirectiveArgument argument, final St } } + /** + * Validate that the provided argument is a {@link URL}. + * + * @param argument the argument to validate + * @param description a description of the argument, used in constructing the + * error message. + * + * @throws ParsingException when the given argument is not a valid {@link URL}. + * + * @return the {@link URL} corresponding to the contained IRI. + */ + public static URL validateUrlArgument(final DirectiveArgument argument, final String description) + throws ParsingException { + URI iri = DirectiveHandler.validateIriArgument(argument, description); + try { + return iri.toURL(); + } catch (MalformedURLException e) { + throw new ParsingException(description + "\"" + argument + "\" is not a valid URL.", e); + } + } + /** * Validate that the provided argument is a {@link Term}. 
* diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 036036fb7..aaf51047b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.parser.datasources; -import java.net.MalformedURLException; - /*- * #%L * VLog4j Parser @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,7 +20,6 @@ * #L% */ -import java.net.URI; import java.net.URL; import java.util.List; @@ -44,16 +41,10 @@ public class SparqlQueryResultDataSourceDeclarationHandler implements DataSource public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 3); - URI endpoint = DirectiveHandler.validateIriArgument(arguments.get(0), "SPARQL endpoint"); + URL endpoint = DirectiveHandler.validateUrlArgument(arguments.get(0), "SPARQL endpoint"); String variables = DirectiveHandler.validateStringArgument(arguments.get(1), "variables list"); String query = DirectiveHandler.validateStringArgument(arguments.get(2), "query fragment"); - URL endpointURL; - try { - endpointURL = endpoint.toURL(); - } catch (MalformedURLException e) { - throw new ParsingException("URI \"" + endpoint + "\" is not a valid URL", e); - } - return new SparqlQueryResultDataSource(endpointURL, variables, query); + return new SparqlQueryResultDataSource(endpoint, variables, query); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java index eab233671..958a9ac76 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveHandlerTest.java @@ -19,9 +19,9 @@ * limitations under the License. 
* #L% */ - import static org.junit.Assert.*; +import java.net.MalformedURLException; import java.net.URI; import org.junit.Test; @@ -93,4 +93,14 @@ public void validateFilenameArgument_invalidFilename_throws() throws ParsingExce "filename argument"); } + @Test + public void validateUrlArgument_url_succeeds() throws ParsingException, MalformedURLException { + assertEquals(DirectiveHandler.validateUrlArgument(IRI_ARGUMENT, "urls argument"), IRI.toURL()); + } + + @Test(expected = ParsingException.class) + public void validateUrlArgument_invalidUrl_throws() throws ParsingException { + DirectiveHandler.validateUrlArgument(DirectiveArgument.iri(URI.create("example://test")), "url argument"); + } + } From 6ede31846a8557b362f6c5efdebee5855f5ee1d4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 17:54:19 +0100 Subject: [PATCH 0799/1255] Parser: Add license header on JavaCCParser --- pom.xml | 7 +++++++ .../vlog4j/parser/javacc/JavaCCParser.jj | 20 +++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/pom.xml b/pom.xml index ac95c88c5..ded2f85ff 100644 --- a/pom.xml +++ b/pom.xml @@ -135,6 +135,13 @@ src/main/java src/test/java + true + + true + + java + diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 5cbd83f81..8dcbb3baf 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -1,3 +1,23 @@ +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2020 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + options { // Use \ u escapes in streams AND use a reader for the query From e55bcfc4652551a9a62b9a4ee1e25cb04aeed7e6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 19:37:37 +0100 Subject: [PATCH 0800/1255] Fix style --- .../MergeablePrefixDeclarations.java | 2 +- .../MergeablePrefixDeclarationsTest.java | 1 - .../parser/javacc/JavaCCParserBase.java | 54 +++++++------------ 3 files changed, 21 insertions(+), 36 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index 7e8404060..fbdf49b59 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -105,7 +105,7 @@ public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); prefixes.forEach((prefixName, baseIri) -> { - // only select proper prefixes here, since `eg:` is not a valid prefixed name. + // only select proper prefixes here, since `eg:` is not a valid prefixed name. 
if (iri.startsWith(baseIri) && !iri.equals(baseIri)) { matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index 7070655c2..77ca77437 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -40,7 +40,6 @@ public class MergeablePrefixDeclarationsTest { private static final String EVEN_MORE_SPECIFIC = MORE_SPECIFIC + "relative/"; private static final String RELATIVE = "relative/test"; - @Before public void init() { prefixDeclarations = new MergeablePrefixDeclarations(); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index ed2f353ba..497fa15e5 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -187,8 +187,9 @@ static String unescapeStr(String s, int line, int column) throws ParseException static String unescape(String s, char escape, int line, int column) throws ParseException { int i = s.indexOf(escape); - if (i == -1) + if (i == -1) { return s; + } // Dump the initial part straight into the string buffer StringBuilder sb = new StringBuilder(s.substring(0, i)); @@ -197,17 +198,14 @@ static String unescape(String s, char escape, int line, int column) throws Parse char ch = s.charAt(i); // Keep line and column numbers. 
switch (ch) { - case '\n': - case '\r': - line++; - + case '\n': + case '\r': + line++; column = 1; - break; + break; default: - column++; - break; } @@ -217,8 +215,9 @@ static String unescape(String s, char escape, int line, int column) throws Parse } // Escape - if (i >= s.length() - 1) + if (i >= s.length() - 1) { throw new ParseException("Illegal escape at end of string, line: " + line + ", column: " + column); + } char ch2 = s.charAt(i + 1); column = column + 1; i = i + 1; @@ -226,43 +225,30 @@ static String unescape(String s, char escape, int line, int column) throws Parse // Not just codepoints. Must be a legal escape. char ch3 = 0; switch (ch2) { - case 'n': - + case 'n': ch3 = '\n'; - break; - + break; case 't': - ch3 = '\t'; - break; - + break; case 'r': - ch3 = '\r'; - break; - + break; case 'b': - ch3 = '\b'; - break; - + break; case 'f': - ch3 = '\f'; - break; - + break; case '\'': - ch3 = '\''; - break; - + ch3 = '\''; + break; case '\"': - ch3 = '\"'; - break; - + ch3 = '\"'; + break; case '\\': - ch3 = '\\'; - break; - + ch3 = '\\'; + break; default: throw new ParseException("Unknown escape: \\" + ch2 + ", line: " + line + ", column: " + column); } From 532b666e59f004c1670e2e0468adef11aef261a6 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 19:49:05 +0100 Subject: [PATCH 0801/1255] Core: Add round-trip tests for MergeablePrefixDeclarations --- .../model/MergeablePrefixDeclarationsTest.java | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index 77ca77437..c89ad654f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -88,6 +88,14 @@ public void 
resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationE assertEquals(prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE), BASE + RELATIVE); } + @Test + public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws PrefixDeclarationException { + prefixDeclarations.setPrefix("eg:", BASE); + String resolved = prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE); + String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); + assertEquals(prefixDeclarations.resolvePrefixedName(unresolved), resolved); + } + @Test public void setPrefix_redeclarePrefix_succeeds() { prefixDeclarations.setPrefix("eg:", BASE); @@ -160,4 +168,12 @@ public void unresolveAbsoluteIri_exactPrefixMatch_identical() { prefixDeclarations.setPrefix("eg:", BASE); assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); } + + @Test + public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws PrefixDeclarationException { + prefixDeclarations.setPrefix("eg:", BASE); + String unresolved = prefixDeclarations.unresolveAbsoluteIri(BASE + RELATIVE); + String resolved = prefixDeclarations.resolvePrefixedName(unresolved); + assertEquals(prefixDeclarations.unresolveAbsoluteIri(resolved), unresolved); + } } From fdb3b36372429d754e2e28bde5417cd2a9bee509 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 20:05:02 +0100 Subject: [PATCH 0802/1255] Parser: Simplify tests for configurable literals --- .../RuleParserConfigurableLiteralTest.java | 28 ++++++------------- 1 file changed, 8 insertions(+), 20 deletions(-) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java index c69bceae6..8a6e31fc8 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java +++ 
b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserConfigurableLiteralTest.java @@ -28,6 +28,7 @@ import java.util.List; import java.util.stream.Collectors; +import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentMatchers; import org.semanticweb.vlog4j.core.model.api.Constant; @@ -58,6 +59,13 @@ public class RuleParserConfigurableLiteralTest { public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( ConfigurableLiteralDelimiter.BRACKET, bracketConstant); + private ParserConfiguration parserConfiguration; + + @Before + public void init() { + parserConfiguration = new ParserConfiguration(); + } + @Test(expected = ParsingException.class) public void parseLiteral_unregisteredCustomLiteral_throws() throws ParsingException { RuleParser.parseLiteral("p(|test|)"); @@ -65,7 +73,6 @@ public void parseLiteral_unregisteredCustomLiteral_throws() throws ParsingExcept @Test public void registerLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); assertTrue("Configurable Literal Handler has been registered", parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE)); @@ -73,14 +80,12 @@ public void registerLiteral_succeeds() throws ParsingException { @Test(expected = IllegalArgumentException.class) public void registerLiteral_duplicateHandler_throws() throws ParsingException, IllegalArgumentException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.PIPE, hashHandler); } @Test public void parseLiteral_customPipeLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); 
parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); Literal result = RuleParser.parseLiteral("p(|test|)", parserConfiguration); assertEquals(pipeConstant, result.getConstants().toArray()[0]); @@ -88,7 +93,6 @@ public void parseLiteral_customPipeLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customHashLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler); Literal result = RuleParser.parseLiteral("p(#test#)", parserConfiguration); assertEquals(hashConstant, result.getConstants().toArray()[0]); @@ -96,7 +100,6 @@ public void parseLiteral_customHashLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customParenLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler); Literal result = RuleParser.parseLiteral("p((test))", parserConfiguration); assertEquals(parenConstant, result.getConstants().toArray()[0]); @@ -104,7 +107,6 @@ public void parseLiteral_customParenLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customBraceLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); Literal result = RuleParser.parseLiteral("p({test})", parserConfiguration); assertEquals(braceConstant, result.getConstants().toArray()[0]); @@ -112,7 +114,6 @@ public void parseLiteral_customBraceLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_customBracketLiteral_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); 
parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); assertEquals(bracketConstant, result.getConstants().toArray()[0]); @@ -120,7 +121,6 @@ public void parseLiteral_customBracketLiteral_succeeds() throws ParsingException @Test public void parseLiteral_mixedLiterals_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler) @@ -136,7 +136,6 @@ public void parseLiteral_mixedLiterals_succeeds() throws ParsingException { public void parseLiteral_nontrivialPipeLiteral_succeeds() throws ParsingException { String label = "this is a test, do not worry."; String input = "p(|" + label + "|)"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); @@ -146,7 +145,6 @@ public void parseLiteral_nontrivialPipeLiteral_succeeds() throws ParsingExceptio public void parseLiteral_nestedParenLiterals_succeeds() throws ParsingException { String label = "(((this is a test, do not worry.)))"; String input = "p((" + label + "))"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); @@ -155,7 +153,6 @@ public void parseLiteral_nestedParenLiterals_succeeds() throws ParsingException @Test public void 
parseLiteral_multipleParenLiterals_succeeds() throws ParsingException { String input = "p((test), (tset))"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -167,7 +164,6 @@ public void parseLiteral_multipleParenLiterals_succeeds() throws ParsingExceptio @Test public void parseLiteral_multipleNestedParenLiterals_succeeds() throws ParsingException { String input = "p(((test)), ((tset), (tst)))"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -179,7 +175,6 @@ public void parseLiteral_multipleNestedParenLiterals_succeeds() throws ParsingEx @Test(expected = ParsingException.class) public void parseLiteral_mismatchedNestedParenLiteral_throws() throws ParsingException { String input = "p((test ())"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); RuleParser.parseLiteral(input, parserConfiguration); } @@ -188,7 +183,6 @@ public void parseLiteral_mismatchedNestedParenLiteral_throws() throws ParsingExc public void parseLiteral_nestedBraceLiteral_succeeds() throws ParsingException { String label = "{{{this is a test, do not worry.}}}"; String input = "p({" + label + "})"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), 
result.getConstants().toArray()[0]); @@ -197,7 +191,6 @@ public void parseLiteral_nestedBraceLiteral_succeeds() throws ParsingException { @Test public void parseLiteral_multipleBraceLiterals_succeeds() throws ParsingException { String input = "p({test}, {tset})"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -209,7 +202,6 @@ public void parseLiteral_multipleBraceLiterals_succeeds() throws ParsingExceptio @Test public void parseLiteral_multipleNestedBraceLiterals_succeeds() throws ParsingException { String input = "p({{test}}, {{tset}})"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -222,7 +214,6 @@ public void parseLiteral_multipleNestedBraceLiterals_succeeds() throws ParsingEx public void parseLiteral_nestedBracketLiteral_succeeds() throws ParsingException { String label = "[[[this is a test, do not worry.]]]"; String input = "p([" + label + "])"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); @@ -231,7 +222,6 @@ public void parseLiteral_nestedBracketLiteral_succeeds() throws ParsingException @Test public void parseLiteral_multipleBracketLiterals_succeeds() throws ParsingException { String input = "p([test], [tset])"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); 
parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -243,7 +233,6 @@ public void parseLiteral_multipleBracketLiterals_succeeds() throws ParsingExcept @Test public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws ParsingException { String input = "p([[test]], [[tset], [tst]])"; - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); Literal result = RuleParser.parseLiteral(input, parserConfiguration); List constants = result.getConstants().collect(Collectors.toList()); @@ -254,7 +243,6 @@ public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws Parsing @Test public void parseLiteral_mixedAndNestedLiterals_succeeds() throws ParsingException { - ParserConfiguration parserConfiguration = new ParserConfiguration(); parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) From ee05e918cab3646e129743c34f85edfdc67685af Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Wed, 12 Feb 2020 20:15:06 +0100 Subject: [PATCH 0803/1255] Core: Fix tests for MergeablePrefixDeclarations --- .../core/model/MergeablePrefixDeclarationsTest.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index c89ad654f..a83ff8f3e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -90,8 +90,9 @@ public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationE @Test public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); - String resolved = prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE); + String prefix = "eg:"; + prefixDeclarations.setPrefix(prefix, BASE); + String resolved = BASE + RELATIVE; String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); assertEquals(prefixDeclarations.resolvePrefixedName(unresolved), resolved); } @@ -171,8 +172,9 @@ public void unresolveAbsoluteIri_exactPrefixMatch_identical() { @Test public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); - String unresolved = prefixDeclarations.unresolveAbsoluteIri(BASE + RELATIVE); + String prefix = "eg:"; + prefixDeclarations.setPrefix(prefix, BASE); + String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); assertEquals(prefixDeclarations.unresolveAbsoluteIri(resolved), unresolved); } From 28c71c4bc10ab6d79e2125d43c4bea895c1a05f8 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 15:32:13 +0100 Subject: [PATCH 0804/1255] Parser: Allow inherited @base to be overridden in @import-relative --- .../vlog4j/core/reasoner/KnowledgeBase.java | 22 +++++++++++++++---- .../parser/LocalPrefixDeclarations.java | 11 +++++++++- .../semanticweb/vlog4j/parser/RuleParser.java | 8 +------ .../ImportFileDirectiveHandler.java | 9 ++------ .../ImportFileRelativeDirectiveHandler.java | 9 ++------ .../vlog4j/parser/RuleParserTest.java | 10 ++++++++- vlog4j-parser/src/test/resources/base.rls | 4 ++++ 7 files changed, 46 insertions(+), 27 deletions(-) create mode 100644 
vlog4j-parser/src/test/resources/base.rls diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 088ffda91..b8735c4fd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -14,7 +14,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.BiFunction; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -444,6 +443,21 @@ Map> getFactsByPredicate() { return this.factsByPredicate; } + /** + * Interface for a method that parses the contents of a stream into a + * KnowledgeBase. + * + * This is essentially + * {@link org.semanticweb.vlog4j.parser.RuleParser#parseInto}, but we need to + * avoid a circular dependency here -- this is also why we throw + * {@link Exception} instead of + * {@link org.semanticweb.vlog4j.parser.ParsingException}. + */ + @FunctionalInterface + public interface AdditionalInputParser { + KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, Exception; + } + /** * Import rules from a file. 
* @@ -458,15 +472,15 @@ Map> getFactsByPredicate() { * * @return this */ - public KnowledgeBase importRulesFile(File file, BiFunction parseFunction) - throws RuntimeException, IOException, IllegalArgumentException { + public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunction) + throws Exception, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); try (InputStream stream = new FileInputStream(file)) { - return parseFunction.apply(stream, this); + return parseFunction.parseInto(stream, this); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java index 6c234806e..8bc8a93b6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java @@ -41,10 +41,19 @@ final public class LocalPrefixDeclarations implements PrefixDeclarations { Map prefixes = new HashMap<>(); String baseUri; + String fallbackUri; + + public LocalPrefixDeclarations() { + this(""); // empty string encodes: "no base" (use relative IRIs) + } + + public LocalPrefixDeclarations(String fallbackUri) { + this.fallbackUri = fallbackUri; + } public String getBase() { if (this.baseUri == null) { - this.baseUri = ""; // empty string encodes: "no base" (use relative IRIs) + this.baseUri = this.fallbackUri; } return baseUri.toString(); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 93e81cb9f..cf26de098 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -58,13 +58,7 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea final JavaCCParser parser = new JavaCCParser(stream, encoding); if (baseIri != null) { - PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(); - - try { - prefixDeclarations.setBase(baseIri); - } catch (PrefixDeclarationException e) { - throw new ParsingException("Invalid base IRI \"" + baseIri + "\"", e); - } + PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(baseIri); parser.setPrefixDeclarations(prefixDeclarations); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java index 3be532ba7..af494d37c 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -21,7 +21,6 @@ */ import java.io.File; -import java.io.IOException; import java.io.InputStream; import java.util.List; @@ -50,14 +49,10 @@ public KnowledgeBase handleDirective(List arguments, final Su try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { - try { - RuleParser.parseInto(kb, stream, parserConfiguration); - } catch (ParsingException e) { - throw new RuntimeException(e); - } + RuleParser.parseInto(kb, stream, parserConfiguration); return kb; }); - } catch (RuntimeException | IOException e) { + } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 6b9d18d4e..680c498e6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -21,7 +21,6 @@ */ import java.io.File; -import java.io.IOException; import java.io.InputStream; import java.util.List; @@ -51,14 +50,10 @@ public KnowledgeBase handleDirective(List arguments, final Su try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { - try { - RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); - } catch (ParsingException e) { - throw new RuntimeException(e); - } + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); return kb; }); - } catch (RuntimeException | IOException e) { + } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 78bb44d6d..a18f20aec 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -447,7 +447,7 @@ public void parse_importStatement_succeeds() throws ParsingException { } @Test - public void parse_relativeImportStatement_suceeds() throws ParsingException { + public void parse_relativeImportStatement_succeeds() throws ParsingException { String input = "@base . 
@import-relative \"src/test/resources/facts.rls\" ."; KnowledgeBase knowledgeBase = RuleParser.parse(input); List expected = Arrays.asList(fact1, fact3); @@ -468,4 +468,12 @@ public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingE KnowledgeBase knowledgeBase = RuleParser.parse(input); RuleParser.parseInto(knowledgeBase, input); } + + public void parseInto_relativeImportRedeclaringBase_succeeds() throws ParsingException { + String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List expected = Arrays.asList(fact1, fact2); + List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } } diff --git a/vlog4j-parser/src/test/resources/base.rls b/vlog4j-parser/src/test/resources/base.rls new file mode 100644 index 000000000..998d6d6fb --- /dev/null +++ b/vlog4j-parser/src/test/resources/base.rls @@ -0,0 +1,4 @@ +@base . + +s(ex:c) . +p("abc") . From ab37e5ecf5677c58411d293717d37d1c6c8ee1e5 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 15:33:09 +0100 Subject: [PATCH 0805/1255] Parser: Make sure that named nulls are not parsed inside rule bodies --- .../semanticweb/vlog4j/parser/javacc/JavaCCParser.jj | 12 +++++++++--- .../vlog4j/parser/javacc/JavaCCParserBase.java | 9 +++++---- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 8dcbb3baf..2043e9c7d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -283,7 +283,13 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { s = absoluteIri() { return createConstant(s); } | t = < VARORPREDNAME > { return createConstant(t.image); } | LOOKAHEAD( < NAMED_NULL >, { isParsingOfNamedNullsAllowed() }) - t = < NAMED_NULL > { return createNamedNull(t.image); } + t = < NAMED_NULL > { + if (context == FormulaContext.BODY) { + throw new ParseException("Named nulls may not appear in the body of a rule."); + } + + return createNamedNull(t.image); + } | c = NumericLiteral() { return c; } | c = RDFLiteral() { return c; } | t = < UNIVAR > { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 497fa15e5..db34b00e4 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -144,10 +144,6 @@ AbstractConstant createConstant(String lexicalForm) throws ParseException { return Expressions.makeAbstractConstant(absoluteIri); } - NamedNull createNamedNull(String lexicalForm) { - return new NamedNullImpl(lexicalForm); - } - /** * Creates a suitable {@link Constant} from the parsed data. 
* @@ -163,6 +159,11 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } } + NamedNull createNamedNull(String lexicalForm) { + // @todo(mx): rename into uuid + return new NamedNullImpl(lexicalForm); + } + void addStatement(Statement statement) { knowledgeBase.addStatement(statement); } From 8745efb2cf5b35e9b622914042d844df9b8b016b Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 17:48:08 +0100 Subject: [PATCH 0806/1255] Remove failOnMissingHeaders from license plugin configuration Since failOnMissingHeaders is only relevant in dryRun mode, this doesn't change the behaviour of the plugin, but removes warnings from the build output. --- pom.xml | 3 -- .../implementation/RenamedNamedNull.java | 41 +++++++++++++++++++ .../vlog4j/parser/ParserConfiguration.java | 2 +- .../parser/javacc/JavaCCParserBase.java | 20 +++++++-- .../parser/RuleParserParseFactTest.java | 6 +-- 5 files changed, 61 insertions(+), 11 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java diff --git a/pom.xml b/pom.xml index ded2f85ff..ed6f23606 100644 --- a/pom.xml +++ b/pom.xml @@ -136,9 +136,6 @@ src/test/java true - - true java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java new file mode 100644 index 000000000..af1861859 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java @@ -0,0 +1,41 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +import java.util.UUID; + +/* + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.api.NamedNull; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; + +/** + * A {@link NamedNull} term that has been renamed during parsing. + * + * @author Maximilian Marx + */ +public class RenamedNamedNull extends NamedNullImpl { + private RenamedNamedNull(String name) { + super(name); + } + + public RenamedNamedNull(UUID name) { + this(name.toString()); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 8b766bf66..4015cb83b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -49,7 +49,7 @@ public class ParserConfiguration { /** * Whether parsing Named Nulls is allowed. */ - private boolean allowNamedNulls = false; + private boolean allowNamedNulls = true; /** * The registered data sources. 
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index db34b00e4..2a56108ef 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,5 +1,8 @@ package org.semanticweb.vlog4j.parser.javacc; +import java.io.ByteArrayOutputStream; +import java.io.IOException; + /*- * #%L * vlog4j-parser @@ -22,6 +25,7 @@ import java.util.HashSet; import java.util.List; +import java.util.UUID; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; @@ -34,7 +38,7 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; @@ -62,6 +66,7 @@ public class JavaCCParserBase { private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; + private byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -159,9 +164,16 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } } - NamedNull createNamedNull(String lexicalForm) { - // @todo(mx): rename into uuid - return new NamedNullImpl(lexicalForm); + NamedNull createNamedNull(String lexicalForm) throws ParseException { + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + try { + stream.write(namedNullNamespace); + stream.write(lexicalForm.getBytes()); + } catch (IOException e) { + throw makeParseExceptionWithCause("Failed to generate a unique name for named null", e); + } + + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); } void addStatement(Statement statement) { diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index f065eed5e..f9b55cedd 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -45,12 +45,12 @@ public class RuleParserParseFactTest { @Test public void parseFact_string_succeeds() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\") ."), factA); + assertEquals(factA, RuleParser.parseFact("p(\"a\") .")); } @Test public void parseFact_twoStrings_succeeds() throws ParsingException { - assertEquals(RuleParser.parseFact("p(\"a\",\"b\") ."), factAB); + assertEquals(factAB, RuleParser.parseFact("p(\"a\",\"b\") .")); } @Test(expected = ParsingException.class) @@ -75,7 +75,7 @@ public void parseFact_namedNull_throws() throws ParsingException { public void parseFact_namedNullAllowed_succeeds() throws ParsingException { String input = "p(_:1) ."; ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); - assertEquals(RuleParser.parseFact(input, parserConfiguration), fact1); + assertEquals(fact1, 
RuleParser.parseFact(input, parserConfiguration)); } @Test(expected = ParsingException.class) From 2ed313e9127343af0a824d0d52205c0ac9069261 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 18:26:20 +0100 Subject: [PATCH 0807/1255] Parser: Rename named nulls during parsing --- .../parser/javacc/JavaCCParserBase.java | 8 +++ .../parser/javacc/SubParserFactory.java | 3 ++ .../parser/ParserConfigurationTest.java | 17 +++--- .../vlog4j/parser/ParserTestUtils.java | 52 +++++++++++++++++++ .../parser/RuleParserParseFactTest.java | 34 ++++++++---- .../vlog4j/parser/RuleParserTest.java | 42 +++++++++++++-- vlog4j-parser/src/test/resources/blank.rls | 1 + 7 files changed, 136 insertions(+), 21 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java create mode 100644 vlog4j-parser/src/test/resources/blank.rls diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 2a56108ef..02e9e35f6 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -326,6 +326,14 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } + byte[] getNamedNullNamespace() { + return namedNullNamespace; + } + + void setNamedNullNamespace(byte[] namedNullNamespace) { + this.namedNullNamespace = namedNullNamespace; + } + public void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { this.prefixDeclarations = prefixDeclarations; } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 75019ef77..1f1269898 100644 --- 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -41,6 +41,7 @@ public class SubParserFactory { private final KnowledgeBase knowledgeBase; private final ParserConfiguration parserConfiguration; private final PrefixDeclarations prefixDeclarations; + private final byte[] namedNullNamespace; /** * Construct a SubParserFactory. @@ -51,6 +52,7 @@ public class SubParserFactory { this.knowledgeBase = parser.getKnowledgeBase(); this.prefixDeclarations = parser.getPrefixDeclarations(); this.parserConfiguration = parser.getParserConfiguration(); + this.namedNullNamespace = parser.getNamedNullNamespace(); } /** @@ -67,6 +69,7 @@ public JavaCCParser makeSubParser(final InputStream inputStream, final String en subParser.setKnowledgeBase(this.knowledgeBase); subParser.setPrefixDeclarations(this.prefixDeclarations); subParser.setParserConfiguration(this.parserConfiguration); + subParser.setNamedNullNamespace(this.namedNullNamespace); return subParser; } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java index e78a42ca5..236094222 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserConfigurationTest.java @@ -80,23 +80,24 @@ public void registerDatatype_dataSourceName_succeeds() { } @Test - public void isParsingOfNamedNullsAllowed_default_returnsFalse() { - assertFalse("named nulls are disallowed by default", parserConfiguration.isParsingOfNamedNullsAllowed()); + public void isParsingOfNamedNullsAllowed_default_returnsTrue() { + assertTrue("named nulls are allowed by default", parserConfiguration.isParsingOfNamedNullsAllowed()); } @Test - public void 
isParsingOfNamedNullsAllowed_enabled_returnsTrue() { - parserConfiguration.allowNamedNulls(); - assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + public void isParsingOfNamedNullsAllowed_disabled_returnsFalse() { + parserConfiguration.disallowNamedNulls(); + assertFalse("named nulls are disallowed after disallowing them", + parserConfiguration.isParsingOfNamedNullsAllowed()); } @Test - public void isParsingOfNamedNullsAllowed_enabledAndDisabled_returnsFalse() { - parserConfiguration.allowNamedNulls(); - assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + public void isParsingOfNamedNullsAllowed_disabledAndEnabled_returnsTrue() { parserConfiguration.disallowNamedNulls(); assertFalse("named nulls are disallowed after disallowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + parserConfiguration.allowNamedNulls(); + assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); } @Test(expected = ParsingException.class) diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java new file mode 100644 index 000000000..93877b1e9 --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/ParserTestUtils.java @@ -0,0 +1,52 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * VLog4j Syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertTrue; + +import java.util.List; +import java.util.UUID; + +import org.semanticweb.vlog4j.core.model.api.Literal; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; + +public interface ParserTestUtils { + public default void assertUuid(String uuidLike) { + try { + UUID.fromString(uuidLike); + } catch (IllegalArgumentException e) { + throw new AssertionError("expected a valid UUID, but got \"" + uuidLike + "\"", e); + } + } + + public default void assertArgumentIsNamedNull(Literal literal, int argument) { + List arguments = literal.getArguments(); + assertTrue("argument is positive", argument >= 1); + assertTrue("argument is a valid position", argument <= arguments.size()); + Term term = arguments.get(argument - 1); + assertTrue("argument is a named null", term instanceof NamedNullImpl); + + if (term instanceof RenamedNamedNull) { + assertUuid(term.getName()); + } + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index f9b55cedd..e2a56b1ce 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -25,23 +25,23 @@ import org.junit.Test; import 
org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; import org.semanticweb.vlog4j.parser.RuleParser; -public class RuleParserParseFactTest { +public class RuleParserParseFactTest implements ParserTestUtils { private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarations.XSD_STRING); private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarations.XSD_STRING); - private final NamedNull null1 = new NamedNullImpl("1"); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); - private final Fact fact1 = Expressions.makeFact("p", null1); @Test public void parseFact_string_succeeds() throws ParsingException { @@ -66,22 +66,36 @@ public void parseFact_arityZeroFact_throws() throws ParsingException { } @Test(expected = ParsingException.class) - public void parseFact_namedNull_throws() throws ParsingException { + public void parseFact_namedNullDisallowed_throws() throws ParsingException { String input = "p(_:1) ."; - RuleParser.parseFact(input); + ParserConfiguration parserConfiguration = new ParserConfiguration().disallowNamedNulls(); + RuleParser.parseFact(input, parserConfiguration); } @Test - public void parseFact_namedNullAllowed_succeeds() throws ParsingException { + public void parseFact_namedNull_succeeds() throws ParsingException { String input = "p(_:1) ."; - ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); - 
assertEquals(fact1, RuleParser.parseFact(input, parserConfiguration)); + Fact result = RuleParser.parseFact(input); + assertArgumentIsNamedNull(result, 1); } @Test(expected = ParsingException.class) public void parseFact_namedNullAsPredicateName_throws() throws ParsingException { String input = "_:p(\"a\") ."; - ParserConfiguration parserConfiguration = new ParserConfiguration().allowNamedNulls(); - RuleParser.parseFact(input, parserConfiguration); + RuleParser.parseFact(input); + } + + @Test(expected = ParsingException.class) + public void parseRule_namedNullInBody_throws() throws ParsingException { + String input = "q(_:head) :- p(_:body) ."; + RuleParser.parseRule(input); + } + + @Test + public void parseRule_namedNullInHead_succeeds() throws ParsingException { + String input = "q(_:head) :- p(\"a\") ."; + Rule result = RuleParser.parseRule(input); + Literal literal = result.getHead().getLiterals().get(0); + assertArgumentIsNamedNull(literal, 1); } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index a18f20aec..8c2f274e2 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -42,7 +42,7 @@ import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -public class RuleParserTest { +public class RuleParserTest implements ParserTestUtils { private final Variable x = Expressions.makeUniversalVariable("X"); private final Variable y = Expressions.makeExistentialVariable("Y"); @@ -373,10 +373,14 @@ public void testBlankPrefixDeclaration() throws ParsingException { RuleParser.parse(input); } - @Test(expected = ParsingException.class) + @Test public void testBlankNodeTerm() throws ParsingException { String input = "(_:blank) ."; - RuleParser.parse(input); + 
KnowledgeBase result = RuleParser.parse(input); + List facts = result.getFacts(); + + assertEquals(1, facts.size()); + assertArgumentIsNamedNull(facts.get(0), 1); } @Test(expected = ParsingException.class) @@ -455,6 +459,38 @@ public void parse_relativeImportStatement_succeeds() throws ParsingException { assertEquals(expected, result); } + @Test + public void parse_import_renamesNamedNulls() throws ParsingException { + String input = "p(_:blank) . @import \"src/test/resources/blank.rls\" ."; + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List facts = knowledgeBase.getFacts(); + assertEquals(2, facts.size()); + Fact fact1 = facts.get(0); + Fact fact2 = facts.get(1); + + assertNotEquals(fact1, fact2); + assertArgumentIsNamedNull(fact1, 1); + assertArgumentIsNamedNull(fact2, 1); + } + + @Test + public void parse_reusedNamedNulls_identical() throws ParsingException { + String input = "p(_:blank) . q(_:blank) . p(_:other) ."; + + KnowledgeBase knowledgeBase = RuleParser.parse(input); + List facts = knowledgeBase.getFacts(); + assertEquals(3, facts.size()); + Fact fact1 = facts.get(0); + Fact fact2 = facts.get(1); + Fact fact3 = facts.get(2); + + assertEquals(fact1.getArguments().get(0), fact2.getArguments().get(0)); + assertNotEquals(fact1.getArguments().get(0), fact3.getArguments().get(0)); + assertArgumentIsNamedNull(fact1, 1); + assertArgumentIsNamedNull(fact2, 1); + assertArgumentIsNamedNull(fact3, 1); + } + @Test(expected = ParsingException.class) public void parseInto_duplicateImportStatements_throws() throws ParsingException { String input = "@import \"src/test/resources/facts.rls\" . "; diff --git a/vlog4j-parser/src/test/resources/blank.rls b/vlog4j-parser/src/test/resources/blank.rls new file mode 100644 index 000000000..cc44c5389 --- /dev/null +++ b/vlog4j-parser/src/test/resources/blank.rls @@ -0,0 +1 @@ +p(_:blank) . 
From 4271c7df5ba7da89b956c2ee5e299c1b680fbbcc Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 17 Feb 2020 20:36:01 +0100 Subject: [PATCH 0808/1255] Parser: Add more tests --- .../semanticweb/vlog4j/parser/RuleParser.java | 13 +-- .../vlog4j/parser/DirectiveArgumentTest.java | 84 +++++++++++++++++++ .../parser/RuleParserDataSourceTest.java | 40 ++++++++- .../vlog4j/parser/RuleParserTest.java | 29 ++++++- vlog4j-parser/src/test/resources/base.rls | 2 +- 5 files changed, 158 insertions(+), 10 deletions(-) create mode 100644 vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index cf26de098..e7784822d 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -53,6 +53,9 @@ public class RuleParser { private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class); + private RuleParser() { + } + public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding, final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { final JavaCCParser parser = new JavaCCParser(stream, encoding); @@ -85,13 +88,13 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea public static void parseInto(final KnowledgeBase knowledgeBase, final String input, final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration, baseIri); + parseInto(knowledgeBase, inputStream, parserConfiguration, baseIri); } public static void parseInto(final KnowledgeBase knowledgeBase, final 
String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); + parseInto(knowledgeBase, inputStream, parserConfiguration); } public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding) @@ -107,7 +110,7 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea public static void parseInto(final KnowledgeBase knowledgeBase, final String input) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - parseInto(knowledgeBase, inputStream, DEFAULT_STRING_ENCODING); + parseInto(knowledgeBase, inputStream); } public static KnowledgeBase parse(final InputStream stream, final String encoding, @@ -125,7 +128,7 @@ public static KnowledgeBase parse(final InputStream stream, final ParserConfigur public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, DEFAULT_STRING_ENCODING, parserConfiguration); + return parse(inputStream, parserConfiguration); } public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException { @@ -138,7 +141,7 @@ public static KnowledgeBase parse(final InputStream stream) throws ParsingExcept public static KnowledgeBase parse(final String input) throws ParsingException { final InputStream inputStream = new ByteArrayInputStream(input.getBytes()); - return parse(inputStream, DEFAULT_STRING_ENCODING); + return parse(inputStream); } /** diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java new file mode 100644 
index 000000000..ccb84bc4f --- /dev/null +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/DirectiveArgumentTest.java @@ -0,0 +1,84 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.*; + +import java.net.URI; + +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; + +public class DirectiveArgumentTest { + private static final String STRING = "src/test/resources/facts.rls"; + private static final URI IRI = URI.create("https://example.org"); + private static final Term TERM = Expressions.makeDatatypeConstant(STRING, IRI.toString()); + + private static final DirectiveArgument STRING_ARGUMENT = DirectiveArgument.string(STRING); + private static final DirectiveArgument IRI_ARGUMENT = DirectiveArgument.iri(IRI); + private static final DirectiveArgument TERM_ARGUMENT = DirectiveArgument.term(TERM); + + @Test + public void equals_null_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals(null)); + assertFalse(IRI_ARGUMENT.equals(null)); + assertFalse(TERM_ARGUMENT.equals(null)); + } + + @Test + public void equals_self_returnsTrue() { + assertTrue(STRING_ARGUMENT.equals(STRING_ARGUMENT)); + assertTrue(IRI_ARGUMENT.equals(IRI_ARGUMENT)); + assertTrue(TERM_ARGUMENT.equals(TERM_ARGUMENT)); + 
} + + @Test + public void equals_equal_returnsTrue() { + assertTrue(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING))); + assertTrue(IRI_ARGUMENT.equals(DirectiveArgument.iri(IRI))); + assertTrue(TERM_ARGUMENT.equals(DirectiveArgument.term(TERM))); + } + + @Test + public void equals_notEqualButSameType_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals(DirectiveArgument.string(STRING + "test"))); + assertFalse(IRI_ARGUMENT.equals(DirectiveArgument.iri(URI.create("https://example.com")))); + assertFalse(TERM_ARGUMENT + .equals(DirectiveArgument.term(Expressions.makeDatatypeConstant(STRING, "https://example.com")))); + } + + @Test + public void equals_differentType_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals(IRI_ARGUMENT)); + assertFalse(STRING_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(IRI_ARGUMENT.equals(STRING_ARGUMENT)); + assertFalse(IRI_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(STRING_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(IRI_ARGUMENT)); + } + + @Test + public void equals_String_returnsFalse() { + assertFalse(STRING_ARGUMENT.equals((Object) "test")); + assertFalse(IRI_ARGUMENT.equals((Object) "test")); + assertFalse(TERM_ARGUMENT.equals((Object) "test")); + } +} diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java index c42975d90..84b0f2d65 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserDataSourceTest.java @@ -32,6 +32,7 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; +import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Predicate; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; 
@@ -146,7 +147,7 @@ public void testCustomDataSource() throws ParsingException { String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; List expectedArguments = Arrays.asList(DirectiveArgument.string("hello"), DirectiveArgument.string("world")); - RuleParser.parseDataSourceDeclaration(input, parserConfiguration); + RuleParser.parse(input, parserConfiguration); verify(handler).handleDirective(ArgumentMatchers.eq(expectedArguments), ArgumentMatchers.any()); @@ -194,4 +195,41 @@ public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws ParsingException, IOException { RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); } + + class DuplicatingDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + public DataSource handleDirective(List arguments, SubParserFactory subParserFactory) + throws ParsingException { + CsvFileDataSource source; + try { + source = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + } catch (IOException e) { + throw new ParsingException(e); + } + + KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); + ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); + RuleParser.parseInto(knowledgeBase, "@source q[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") .", + parserConfiguration); + + return source; + } + } + + @Test + public void parseInto_mockDataSourceWithBase_succeeds() throws ParsingException { + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); + String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; + KnowledgeBase knowledgeBase = new KnowledgeBase(); + RuleParser.parseInto(knowledgeBase, input, parserConfiguration, "https://example.org"); + assertEquals(2, 
knowledgeBase.getStatements().size()); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_unexpectedlyAddsTwoDatasources_throws() throws ParsingException { + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", new DuplicatingDataSourceDeclarationHandler()); + String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; + RuleParser.parseDataSourceDeclaration(input, parserConfiguration); + } } diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 8c2f274e2..25243d0cb 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -37,10 +37,12 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Variable; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.FormulaContext; public class RuleParserTest implements ParserTestUtils { @@ -374,7 +376,7 @@ public void testBlankPrefixDeclaration() throws ParsingException { } @Test - public void testBlankNodeTerm() throws ParsingException { + public void parse_NamedNullInFact_succeeds() throws ParsingException { String input = "(_:blank) ."; KnowledgeBase result = RuleParser.parse(input); List facts = result.getFacts(); @@ -383,8 +385,28 @@ public void testBlankNodeTerm() throws ParsingException { assertArgumentIsNamedNull(facts.get(0), 1); } 
+ @Test + public void parseTerm_NamedNull_succeeds() throws ParsingException { + String input = "_:blank"; + Term result = RuleParser.parseTerm(input); + assertUuid(result.getName()); + } + + @Test + public void parseTerm_NamedNullInHead_succeeds() throws ParsingException { + String input = "_:blank"; + Term result = RuleParser.parseTerm(input, FormulaContext.HEAD); + assertUuid(result.getName()); + } + @Test(expected = ParsingException.class) - public void testBlankPredicateName() throws ParsingException { + public void parseTerm_NamedNullInBodyContext_throws() throws ParsingException { + String input = "_:blank"; + RuleParser.parseTerm(input, FormulaContext.BODY); + } + + @Test(expected = ParsingException.class) + public void testBParsingExceptione() throws ParsingException { String input = "_:(a) ."; RuleParser.parse(input); } @@ -505,10 +527,11 @@ public void parseInto_duplicateRelativeImportStatements_throws() throws ParsingE RuleParser.parseInto(knowledgeBase, input); } + @Test public void parseInto_relativeImportRedeclaringBase_succeeds() throws ParsingException { String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; KnowledgeBase knowledgeBase = RuleParser.parse(input); - List expected = Arrays.asList(fact1, fact2); + List expected = Arrays.asList(fact1, fact3); List result = knowledgeBase.getFacts(); assertEquals(expected, result); } diff --git a/vlog4j-parser/src/test/resources/base.rls b/vlog4j-parser/src/test/resources/base.rls index 998d6d6fb..69e925147 100644 --- a/vlog4j-parser/src/test/resources/base.rls +++ b/vlog4j-parser/src/test/resources/base.rls @@ -1,4 +1,4 @@ @base . -s(ex:c) . +s(c) . p("abc") . 
From 0fbdacdd288c673fcf810e2b0ce539a1dfc2cbb3 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 18 Feb 2020 10:33:55 +0100 Subject: [PATCH 0809/1255] Core: Fix spelling of VLogReasoner#getKnowledgeBasePredicates --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index b48b19f50..690eb2019 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -462,7 +462,7 @@ public Correctness writeInferences(OutputStream stream) throws IOException { throw new ReasonerStateException(this.reasonerState, "Obtaining inferences is not alowed before reasoner is loaded!"); } - final Set toBeQueriedHeadPredicates = getKnolwedgeBasePredicates(); + final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); for (final Predicate predicate : toBeQueriedHeadPredicates) { final PositiveLiteral queryAtom = getQueryAtom(predicate); @@ -619,7 +619,7 @@ public void onStatementsRemoved(List statementsRemoved) { updateCorrectnessOnStatementsRemoved(); } - Set getKnolwedgeBasePredicates() { + Set getKnowledgeBasePredicates() { final Set toBeQueriedHeadPredicates = new HashSet<>(); for (final Rule rule : this.knowledgeBase.getRules()) { for (final Literal literal : rule.getHead()) { From 7c9b6ab3796f1a15ccf8b679373869553e4a6cf2 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 18 Feb 2020 15:28:29 +0100 Subject: [PATCH 0810/1255] Core: Abbreviate IRIs in exported facts using known prefixes --- .../core/model/api/AbstractConstant.java | 16 ++- .../core/model/api/DatatypeConstant.java | 23 +-- .../core/model/api/ExistentialVariable.java | 14 +- 
.../model/api/LanguageStringConstant.java | 23 +-- .../vlog4j/core/model/api/NamedNull.java | 14 +- .../vlog4j/core/model/api/Predicate.java | 113 ++++++++------- .../vlog4j/core/model/api/Term.java | 25 +++- .../core/model/api/UniversalVariable.java | 15 +- .../MergeablePrefixDeclarations.java | 12 +- .../core/model/implementation/Serializer.java | 110 +++++++++++++-- .../vlog4j/core/reasoner/KnowledgeBase.java | 9 ++ .../vlog4j/core/reasoner/Reasoner.java | 41 +++--- .../reasoner/implementation/VLogReasoner.java | 13 +- .../MergeablePrefixDeclarationsTest.java | 50 ++++--- .../VLogReasonerWriteInferencesTest.java | 133 ++++++++++++++++++ .../VlogReasonerWriteInferencesTest.java | 101 ------------- 16 files changed, 455 insertions(+), 257 deletions(-) create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java delete mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java index 760b74358..41f7bd3ce 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,11 +20,15 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for abstract constants, i.e. for constants that represent an * abstract domain element (in contrast to a specific value of a concrete * datatype). Such terms are of type {@link TermType#ABSTRACT_CONSTANT}. - * + * * @author Markus Kroetzsch */ public interface AbstractConstant extends Constant { @@ -41,4 +43,8 @@ default String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return Serializer.getString(this, iriTransformer); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java index 3702b7b52..d4dfe19e8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,16 +20,20 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for datatype constants, i.e. for constants that represent a * specific value of a concrete datatype). Such terms are of type * {@link TermType#DATATYPE_CONSTANT}. - * + * * Note that datatype literal is a common name of the representation of * specific values for a datatype. We mostly avoid this meaning of * literal since a literal in logic is typically a negated or non-negated * atom. - * + * * @author Markus Kroetzsch */ public interface DatatypeConstant extends Constant { @@ -44,7 +46,7 @@ default TermType getType() { /** * Returns the datatype of this term, which is typically an IRI that defines how * to interpret the lexical value. - * + * * @return a non-blank String (not null, nor empty or whitespace). */ String getDatatype(); @@ -55,7 +57,7 @@ default TermType getType() { * strings that represent the same value, depending on the rules of the * datatype, and that there the value used here does not have to be a canonical * representation. 
- * + * * @return a non-null string */ String getLexicalValue(); @@ -64,4 +66,9 @@ default TermType getType() { default String getSyntacticRepresentation() { return Serializer.getString(this); } + + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return Serializer.getString(this, iriTransformer); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java index d573a7850..c62b7ce1b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for existentially quantified variables, i.e., variables that appear * in the scope of an (implicit) existential quantifier in a rule. 
@@ -40,4 +42,8 @@ default String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index 2c150b507..c38b0f901 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /*- * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,12 +20,16 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for string constants with a language tag, used to represent values * of type http://www.w3.org/1999/02/22-rdf-syntax-ns#langString in RDF, OWL, * and related languages used with knowledge graphs. Such terms are of type * {@link TermType#LANGSTRING_CONSTANT}. - * + * * @author Markus Kroetzsch */ public interface LanguageStringConstant extends Constant { @@ -40,7 +42,7 @@ default TermType getType() { /** * Returns the datatype of this term, which is always * http://www.w3.org/1999/02/22-rdf-syntax-ns#langString. 
- * + * * @return a IRI of RDF langString datatype */ default String getDatatype() { @@ -49,7 +51,7 @@ default String getDatatype() { /** * Returns the string value of the literal without the language tag. - * + * * @return a non-null string */ String getString(); @@ -58,7 +60,7 @@ default String getDatatype() { * Returns the language tag of the literal, which should be a lowercase string * that conforms to the BCP 47 * specification. - * + * * @return a non-empty string */ String getLanguageTag(); @@ -67,4 +69,9 @@ default String getDatatype() { default String getSyntacticRepresentation() { return Serializer.getConstantName(this); } + + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java index 5413b9365..297692483 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /* * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for {@link TermType#NAMED_NULL} terms. 
A blank is an entity used to * represent anonymous domain elements introduced during the reasoning process @@ -41,4 +43,8 @@ default String getSyntacticRepresentation() { return Serializer.getString(this); } + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java index 7b604f289..c7b92761b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java @@ -1,57 +1,56 @@ -package org.semanticweb.vlog4j.core.model.api; - -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * A Predicate represents a relation between terms. Is uniquely identified by - * its name and arity. The arity determines the number of terms allowed in the - * relation. For example, a Predicate with name {@code P} and arity {@code n} - * allows atomic formulae of the form {@code P(t1,...,tn)}. - * - * @author Irina Dragoste - * - */ -public interface Predicate extends Entity { - - /** - * The name of the Predicate. - * - * @return the name of the Predicate. 
- */ - String getName(); - - /** - * The arity represents the number of terms allowed as relation arguments for - * this Predicate. For example, a Predicate with name {@code P} and arity - * {@code n} allows atomic formulae of the form {@code P(t1,...,tn)}. - * - * @return the arity of the Predicate. - */ - int getArity(); - - @Override - default String getSyntacticRepresentation() { - return Serializer.getString(this); - } - -} +package org.semanticweb.vlog4j.core.model.api; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + +/** + * A Predicate represents a relation between terms. Is uniquely identified by + * its name and arity. The arity determines the number of terms allowed in the + * relation. For example, a Predicate with name {@code P} and arity {@code n} + * allows atomic formulae of the form {@code P(t1,...,tn)}. + * + * @author Irina Dragoste + * + */ +public interface Predicate extends Entity { + + /** + * The name of the Predicate. + * + * @return the name of the Predicate. + */ + String getName(); + + /** + * The arity represents the number of terms allowed as relation arguments for + * this Predicate. For example, a Predicate with name {@code P} and arity + * {@code n} allows atomic formulae of the form {@code P(t1,...,tn)}. + * + * @return the arity of the Predicate. 
+ */ + int getArity(); + + @Override + default String getSyntacticRepresentation() { + return Serializer.getString(this); + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java index 582493c69..54843893b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for terms. A term is characterized by a string name and a * {@link TermType}. @@ -32,21 +36,21 @@ public interface Term extends Entity { /** * Returns the name this term. The name uniquely identifies terms of the same * {@link TermType}. - * + * * @return a non-blank String (not null, nor empty or whitespace). */ String getName(); /** * Return the type of this term. - * + * * @return the type of this term */ TermType getType(); /** * Returns true if the term represents some kind of constant. - * + * * @return true if term is constant */ default boolean isConstant() { @@ -56,7 +60,7 @@ default boolean isConstant() { /** * Returns true if the term represents some kind of variable. 
- * + * * @return true if term is variable */ default boolean isVariable() { @@ -71,4 +75,13 @@ default boolean isVariable() { */ T accept(TermVisitor termVisitor); + /** + * Return the parseable string representation of this Term, transforming IRIs. + * + * @param iriTransformer a function that is applied to transform any IRIs + * occurring in the output. + * + * @return non-empty String containing the representation. + */ + String getSyntacticRepresentation(Function iriTransformer); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java index 975620a03..66e2d38a3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; - /* * #%L * VLog4j Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,10 @@ * #L% */ +import java.util.function.Function; + +import org.semanticweb.vlog4j.core.model.implementation.Serializer; + /** * Interface for universally quantified variables, i.e., variables that appear * in the scope of an (implicit) universal quantifier in a rule. 
@@ -39,4 +41,9 @@ default TermType getType() { default String getSyntacticRepresentation() { return Serializer.getString(this); } + + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index fbdf49b59..dbea5e454 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -104,10 +104,14 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); - prefixes.forEach((prefixName, baseIri) -> { + if (baseUri != EMPTY_BASE_PREFIX && iri.startsWith(baseUri) && !iri.equals(baseUri)) { + matches.put(iri.replaceFirst(baseUri, ""), baseUri.length()); + } + + prefixes.forEach((prefixName, prefixIri) -> { // only select proper prefixes here, since `eg:` is not a valid prefixed name. 
- if (iri.startsWith(baseIri) && !iri.equals(baseIri)) { - matches.put(iri.replaceFirst(baseIri, prefixName), baseIri.length()); + if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { + matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); } }); @@ -150,6 +154,8 @@ public Iterator iterator() { * @return this */ public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarations other) { + this.setBase(other.getBase()); + for (String prefixName : other) { String iri; try { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index b9f3c2b51..8d0325fd1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,7 +1,9 @@ package org.semanticweb.vlog4j.core.model.implementation; import java.util.List; +import java.util.function.Function; +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; /*- @@ -38,6 +40,7 @@ import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -65,6 +68,8 @@ public final class Serializer { public static final String RULE_SEPARATOR = " :- "; public static final char AT = '@'; public static final String DATA_SOURCE = "@source "; + public static final String BASE = "@base "; + public static final String PREFIX = "@prefix "; public static 
final String CSV_FILE_DATA_SOURCE = "load-csv"; public static final String RDF_FILE_DATA_SOURCE = "load-rdf"; public static final String SPARQL_QUERY_RESULT_DATA_SOURCE = "sparql"; @@ -148,6 +153,18 @@ public static String getFactString(final Fact fact) { return getString(fact) + STATEMENT_SEPARATOR; } + /** + * Creates a String representation of a given {@link Constant}. + * + * @see Rule syntax . + * @param constant a {@link Constant} + * @param iriTransformer a function to transform IRIs with. + * @return String representation corresponding to a given {@link Constant}. + */ + public static String getString(final AbstractConstant constant, Function iriTransformer) { + return getIRIString(constant.getName(), iriTransformer); + } + /** * Creates a String representation of a given {@link Constant}. * @@ -178,23 +195,49 @@ public static String getConstantName(final LanguageStringConstant languageString * * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} + * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given * {@link DatatypeConstant}. 
*/ - public static String getString(final DatatypeConstant datatypeConstant) { + public static String getString(final DatatypeConstant datatypeConstant, Function iriTransformer) { if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { return getString(datatypeConstant.getLexicalValue()); + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { + return datatypeConstant.getLexicalValue(); } else { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { - return datatypeConstant.getLexicalValue(); - } else { - return getConstantName(datatypeConstant); - } + return getConstantName(datatypeConstant, iriTransformer); } } + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} without an IRI. + * + * @see Rule syntax . + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. + */ + public static String getString(final DatatypeConstant datatypeConstant) { + return getString(datatypeConstant, Function.identity()); + } + + /** + * Creates a String representation corresponding to the name of a given + * {@link DatatypeConstant} including an IRI. + * + * @see Rule syntax . + * @param datatypeConstant a {@link DatatypeConstant} + * @return String representation corresponding to a given + * {@link DatatypeConstant}. 
+ */ + public static String getConstantName(final DatatypeConstant datatypeConstant, Function iriTransformer) { + return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + } + /** * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. @@ -318,6 +361,16 @@ private static String getFileString(final FileDataSource fileDataSource) { } private static String getIRIString(final String string) { + return getIRIString(string, Function.identity()); + } + + private static String getIRIString(final String string, Function iriTransformer) { + String transformed = iriTransformer.apply(string); + + if (!transformed.equals(string)) { + return transformed; + } + if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { return addAngleBrackets(string); @@ -382,12 +435,20 @@ private static String addAngleBrackets(final String string) { public static String getFactString(Predicate predicate, List terms) { return getString(predicate, terms) + STATEMENT_SEPARATOR + NEW_LINE; + } + public static String getFactString(Predicate predicate, List terms, Function iriTransformer) { + return getString(predicate, terms, iriTransformer) + STATEMENT_SEPARATOR + "\n"; } public static String getString(Predicate predicate, List terms) { - final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName())); + return getString(predicate, terms, Function.identity()); + } + + public static String getString(Predicate predicate, List terms, Function iriTransformer) { + final StringBuilder stringBuilder = new StringBuilder(getIRIString(predicate.getName(), iriTransformer)); stringBuilder.append(OPENING_PARENTHESIS); + boolean first = true; for (final Term term : terms) { if (first) { @@ -395,12 +456,41 @@ public static 
String getString(Predicate predicate, List terms) { } else { stringBuilder.append(COMMA); } - final String string = term.getSyntacticRepresentation(); + final String string = term.getSyntacticRepresentation(iriTransformer); stringBuilder.append(string); } stringBuilder.append(CLOSING_PARENTHESIS); return stringBuilder.toString(); + } + + public static String getBaseString(KnowledgeBase knowledgeBase) { + String baseIri = knowledgeBase.getBase(); + + if (baseIri.equals("")) { + return ""; + } + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + "\n"; } + public static String getPrefixString(String prefixName, String prefixIri) { + return PREFIX + prefixName + " " + addAngleBrackets(prefixIri) + STATEMENT_SEPARATOR + "\n"; + } + + public static String getBaseAndPrefixDeclarations(KnowledgeBase knowledgeBase) { + StringBuilder sb = new StringBuilder(); + + sb.append(getBaseString(knowledgeBase)); + + knowledgeBase.getPrefixes().forEachRemaining((String prefixName) -> { + try { + sb.append(getPrefixString(prefixName, knowledgeBase.getPrefix(prefixName))); + } catch (PrefixDeclarationException e) { + // this shouldn't throw, since we're iterating over known prefixes. + throw new RuntimeException(e); + } + }); + + return sb.toString(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index b8735c4fd..92d3b779c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -508,6 +508,15 @@ public String getBase() { return this.prefixDeclarations.getBase(); } + /* + * Return the declared prefixes. + * + * @return an iterator over all known prefixes. + */ + public Iterator getPrefixes() { + return this.prefixDeclarations.iterator(); + } + /** * Resolve {@code prefixName} into the declared IRI. 
* diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 55985c1e2..968c53b51 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.reasoner; import java.io.FileNotFoundException; +import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -83,7 +84,7 @@ static Reasoner getInstance() { /** * Getter for the knowledge base to reason on. - * + * * @return the reasoner's knowledge base */ KnowledgeBase getKnowledgeBase(); @@ -91,7 +92,7 @@ static Reasoner getInstance() { /** * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. - * + * * @param an OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. @@ -103,13 +104,17 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to a desired file. * - * @param a String of the file path for the facts to be written to. + * @param filePath a String of the file path for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. 
* @throws IOException * @throws FileNotFoundException */ - Correctness writeInferences(String filePath) throws FileNotFoundException, IOException; + default Correctness writeInferences(String filePath) throws FileNotFoundException, IOException { + try (OutputStream stream = new FileOutputStream(filePath)) { + return writeInferences(stream); + } + } /** * Sets the algorithm that will be used for reasoning over the knowledge base. @@ -203,7 +208,7 @@ static Reasoner getInstance() { /** * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic, * Cyclic, or cyclicity cannot be determined. - * + * * @return the appropriate CyclicityResult. */ CyclicityResult checkForCycles(); @@ -215,7 +220,7 @@ static Reasoner getInstance() { * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will * always terminate. - * + * * @return {@code true}, if the loaded set of rules is Joint Acyclic with * respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -228,7 +233,7 @@ static Reasoner getInstance() { * RJA, then, for the given set of rules and any facts over the given EDB * predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE Restricted chase} * will always terminate - * + * * @return {@code true}, if the loaded set of rules is Restricted Joint Acyclic * with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -242,7 +247,7 @@ static Reasoner getInstance() { * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will * always terminate - * + * * @return {@code true}, if the loaded set of rules is Model-Faithful Acyclic * with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -256,7 +261,7 @@ static Reasoner getInstance() { * over the given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE * Restricted chase} will always terminate. If a set of rules and EDB predicates * is MFA, then it is also JA. - * + * * @return {@code true}, if the loaded set of rules is Restricted Model-Faithful * Acyclic with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -271,7 +276,7 @@ static Reasoner getInstance() { * is guaranteed not to terminate for the loaded rules. If a set of rules and * EDB predicates is RMFA, then it is also RJA. Therefore, if a set or rules and * EDB predicates is MFC, it is not MFA, nor JA. - * + * * @return {@code true}, if the loaded set of rules is Model-Faithful Cyclic * with respect to the EDB predicates of loaded facts.
              * {@code false}, otherwise @@ -290,7 +295,7 @@ static Reasoner getInstance() { * more cases.
              * To avoid non-termination, a reasoning timeout can be set * ({@link Reasoner#setReasoningTimeout(Integer)}).
              - * + * * @return *
                *
              • {@code true}, if materialisation reached completion.
              • @@ -315,7 +320,7 @@ static Reasoner getInstance() { * A query answer is represented by a {@link QueryResult}. A query can have * multiple, distinct query answers. This method returns an Iterator over these * answers.
                - * + * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} * ({@link QueryResultIterator#getCorrectness()}): @@ -340,7 +345,7 @@ static Reasoner getInstance() { * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, * in order to obtain correct query answers. *
              - * + * * * @param query a {@link PositiveLiteral} representing the query to be * answered. @@ -360,7 +365,7 @@ static Reasoner getInstance() { * * Evaluates an atomic ({@code query}), and counts the number of query answer * implicit facts loaded into the reasoner and the number of query answer * explicit facts materialised by the reasoner. - * + * * @param query a {@link PositiveLiteral} representing the query to be answered. * * @return a {@link QueryAnswerCount} object that contains the query answers @@ -387,7 +392,7 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * the {@code query} are matched by terms in the fact, either named * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The * same variable name identifies the same term in the answer fact.
              - * + * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} * ({@link QueryResultIterator#getCorrectness()}): @@ -412,14 +417,14 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required, * in order to obtain correct query answers. *
            - * + * * * @param query a {@link PositiveLiteral} representing the query to be * answered. * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} * terms will be counted. Otherwise, facts with * {@link TermType#NAMED_NULL} terms will be ignored. - * + * * @return a {@link QueryAnswerCount} object that contains the query answers * Correctness and the number query answers, i.e. the number of facts in * the extension of the query. @@ -482,7 +487,7 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * ({@link Reasoner#reason()}) is required, in order to obtain correct * query answers. *
          - * + * */ Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeNulls) throws IOException; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index 690eb2019..ebbf50ad1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -10,6 +10,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; @@ -464,6 +465,8 @@ public Correctness writeInferences(OutputStream stream) throws IOException { } final Set toBeQueriedHeadPredicates = getKnowledgeBasePredicates(); + stream.write(Serializer.getBaseAndPrefixDeclarations(knowledgeBase).getBytes()); + for (final Predicate predicate : toBeQueriedHeadPredicates) { final PositiveLiteral queryAtom = getQueryAtom(predicate); final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); @@ -471,7 +474,8 @@ public Correctness writeInferences(OutputStream stream) throws IOException { while (answers.hasNext()) { final karmaresearch.vlog.Term[] vlogTerms = answers.next(); final List termList = VLogToModelConverter.toTermList(vlogTerms); - stream.write(Serializer.getFactString(predicate, termList).getBytes()); + stream.write(Serializer.getFactString(predicate, termList, knowledgeBase::unresolveAbsoluteIri) + .getBytes()); } } catch (final NotStartedException e) { throw new RuntimeException("Inconsistent reasoner state.", e); @@ -484,13 +488,6 @@ public Correctness writeInferences(OutputStream stream) throws IOException { return this.correctness; } - @Override - public Correctness 
writeInferences(String filePath) throws FileNotFoundException, IOException { - try (OutputStream stream = new FileOutputStream(filePath)) { - return writeInferences(stream); - } - } - private void logWarningOnCorrectness() { if (this.correctness != Correctness.SOUND_AND_COMPLETE) { LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java index a83ff8f3e..f515cd6db 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java @@ -48,33 +48,33 @@ public void init() { @Test public void setBase_changingBase_succeeds() { prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.getBase(), BASE); + assertEquals(BASE, prefixDeclarations.getBase()); prefixDeclarations.setBase(MORE_SPECIFIC); - assertEquals(prefixDeclarations.getBase(), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getBase()); } @Test public void setBase_redeclareSameBase_succeeds() { prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.getBase(), BASE); + assertEquals(BASE, prefixDeclarations.getBase()); prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.getBase(), BASE); + assertEquals(BASE, prefixDeclarations.getBase()); } @Test public void absolutize_noBase_identical() { - assertEquals(prefixDeclarations.absolutize(RELATIVE), RELATIVE); + assertEquals(RELATIVE, prefixDeclarations.absolutize(RELATIVE)); } @Test public void absolutize_base_absoluteIri() { prefixDeclarations.setBase(BASE); - assertEquals(prefixDeclarations.absolutize(RELATIVE), BASE + RELATIVE); + assertEquals(BASE + RELATIVE, prefixDeclarations.absolutize(RELATIVE)); } @Test public 
void absolutize_absoluteIri_identical() { - assertEquals(prefixDeclarations.absolutize(BASE), BASE); + assertEquals(BASE, prefixDeclarations.absolutize(BASE)); } @Test(expected = PrefixDeclarationException.class) @@ -85,7 +85,7 @@ public void resolvePrefixedName_undeclaredPrefix_throws() throws PrefixDeclarati @Test public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE), BASE + RELATIVE); + assertEquals(BASE + RELATIVE, prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE)); } @Test @@ -94,13 +94,14 @@ public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws Pref prefixDeclarations.setPrefix(prefix, BASE); String resolved = BASE + RELATIVE; String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); - assertEquals(prefixDeclarations.resolvePrefixedName(unresolved), resolved); + assertEquals(resolved, prefixDeclarations.resolvePrefixedName(unresolved)); } @Test - public void setPrefix_redeclarePrefix_succeeds() { + public void setPrefix_redeclarePrefix_succeeds() throws PrefixDeclarationException { prefixDeclarations.setPrefix("eg:", BASE); prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + assertEquals(BASE, prefixDeclarations.getPrefix("eg:")); } @Test @@ -110,14 +111,14 @@ public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDe prefixDeclarations.setPrefix("eg:", BASE); prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); - assertEquals(prefixDeclarations.getPrefix(prefix + "1:"), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix(prefix + "1:")); } @Test public void mergeablePrefixDeclarations_constructor_succeeds() throws PrefixDeclarationException { this.prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(this.prefixDeclarations); - 
assertEquals(prefixDeclarations.getPrefix("eg:"), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix("eg:")); } @Test(expected = RuntimeException.class) @@ -132,42 +133,49 @@ public void mergePrefixDeclarations_getPrefixUnexpectedlyThrows_throws() throws @Test public void unresolveAbsoluteIri_default_identical() { - assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); } @Test public void unresolveAbsoluteIri_declaredPrefix_succeeds() { - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedPrefix_identical() { prefixDeclarations.setPrefix("eg:", UNRELATED); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { prefixDeclarations.setPrefix("ex:", UNRELATED); prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC), "eg:example/"); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { prefixDeclarations.setPrefix("eg:", BASE); prefixDeclarations.setPrefix("ex:", MORE_SPECIFIC); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "ex:" + RELATIVE); + assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); prefixDeclarations.setPrefix("er:", 
EVEN_MORE_SPECIFIC); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE), "er:test"); + assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test public void unresolveAbsoluteIri_exactPrefixMatch_identical() { prefixDeclarations.setPrefix("eg:", BASE); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(BASE), BASE); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); + } + + @Test + public void unresolveAbsoluteIri_baseIsMoreSpecific_baseWins() { + prefixDeclarations.setBase(MORE_SPECIFIC); + prefixDeclarations.setPrefix("eg:", BASE); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test @@ -176,6 +184,6 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref prefixDeclarations.setPrefix(prefix, BASE); String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); - assertEquals(prefixDeclarations.unresolveAbsoluteIri(resolved), unresolved); + assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); } } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java new file mode 100644 index 000000000..4ef6212c1 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -0,0 +1,133 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.Test; +import 
org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.vlog4j.core.model.api.Conjunction; +import org.semanticweb.vlog4j.core.model.api.Constant; +import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; +import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.Reasoner; + +/*- + * #%L + * VLog4j Core Components + * %% + * Copyright (C) 2018 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public class VLogReasonerWriteInferencesTest { + private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); + private final Fact fact = Expressions.makeFact("http://example.org/s", c); + private final AbstractConstant dresdenConst = Expressions.makeAbstractConstant("dresden"); + private final Predicate locatedInPred = Expressions.makePredicate("LocatedIn", 2); + private final Predicate addressPred = Expressions.makePredicate("address", 4); + private final Predicate universityPred = Expressions.makePredicate("university", 2); + private final UniversalVariable varX = Expressions.makeUniversalVariable("X"); + private final UniversalVariable varY = Expressions.makeUniversalVariable("Y"); + private final PositiveLiteral pl1 = Expressions.makePositiveLiteral(locatedInPred, varX, varY); + private final PositiveLiteral pl2 = Expressions.makePositiveLiteral("location", varX, varY); + private final PositiveLiteral pl3 = Expressions.makePositiveLiteral(addressPred, varX, + Expressions.makeExistentialVariable("Y"), Expressions.makeExistentialVariable("Z"), + Expressions.makeExistentialVariable("Q")); + private final PositiveLiteral pl4 = Expressions.makePositiveLiteral(locatedInPred, + Expressions.makeExistentialVariable("Q"), Expressions.makeUniversalVariable("F")); + private final PositiveLiteral pl5 = Expressions.makePositiveLiteral(universityPred, varX, + Expressions.makeUniversalVariable("F")); + private final Conjunction conjunction = Expressions.makePositiveConjunction(pl3, pl4); + private final Rule rule1 = Expressions.makeRule(pl1, pl2); + private final Rule rule2 = Expressions.makeRule(conjunction, Expressions.makeConjunction(pl5)); + private final Fact f1 = Expressions.makeFact(locatedInPred, Expressions.makeAbstractConstant("Egypt"), + Expressions.makeAbstractConstant("Africa")); + private final Fact f2 = Expressions.makeFact(addressPred, Expressions.makeAbstractConstant("TSH"), + 
Expressions.makeAbstractConstant("Pragerstraße13"), Expressions.makeAbstractConstant("01069"), + dresdenConst); + private final Fact f3 = Expressions.makeFact("city", dresdenConst); + private final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); + private final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), + Expressions.makeAbstractConstant("germany")); + private final InMemoryDataSource locations = new InMemoryDataSource(2, 1); + private KnowledgeBase kb; + + @Before + public void initKb() { + kb = new KnowledgeBase(); + kb.addStatement(fact); + kb.addStatements(rule1, rule2, f1, f2, f3, f4, f5); + locations.addTuple("dresden", "germany"); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); + } + + @Test + public void writeInferences_example_succeeds() throws IOException { + assertEquals(10, getInferences().size()); + } + + @Test + public void writeInferences_withPrefixDeclarations_abbreviatesIris() + throws IOException, PrefixDeclarationException { + PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); + when(prefixDeclarations.getBase()).thenReturn(""); + when(prefixDeclarations.getPrefix(eq("eg:"))).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:").iterator()); + kb.mergePrefixDeclarations(prefixDeclarations); + + assertEquals(11, getInferences().size()); + assertTrue("the abbreviated fact is present", getInferences().contains("eg:s(eg:c) .")); + } + + @Test + public void writeInferences_withBase_writesBase() throws IOException, PrefixDeclarationException { + PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); + when(prefixDeclarations.getBase()).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(Arrays.asList().iterator()); + kb.mergePrefixDeclarations(prefixDeclarations); + + assertEquals(11, 
getInferences().size()); + assertTrue("the base declaration is present", getInferences().contains("@base .")); + } + + private List getInferences() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + reasoner.writeInferences(stream); + stream.flush(); + + Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s*)\\.\\s*")); + + return inferences.map((String inference) -> inference + ".").collect(Collectors.toList()); + } + } +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java deleted file mode 100644 index 73af579c0..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VlogReasonerWriteInferencesTest.java +++ /dev/null @@ -1,101 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; - -import java.io.BufferedReader; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.StringReader; -import java.util.ArrayList; -import java.util.List; -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import 
org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -public class VlogReasonerWriteInferencesTest { - final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); - final Fact fact = Expressions.makeFact("http://example.org/s", c); - final AbstractConstant dresdenConst = Expressions.makeAbstractConstant("dresden"); - final Predicate locatedInPred = Expressions.makePredicate("LocatedIn", 2); - final Predicate addressPred = Expressions.makePredicate("address", 4); - final Predicate universityPred = Expressions.makePredicate("university", 2); - final UniversalVariable varX = Expressions.makeUniversalVariable("X"); - final UniversalVariable varY = Expressions.makeUniversalVariable("Y"); - final PositiveLiteral pl1 = Expressions.makePositiveLiteral(locatedInPred, varX, varY); - final PositiveLiteral pl2 = Expressions.makePositiveLiteral("location", varX, varY); - final PositiveLiteral pl3 = Expressions.makePositiveLiteral(addressPred, varX, - Expressions.makeExistentialVariable("Y"), Expressions.makeExistentialVariable("Z"), - Expressions.makeExistentialVariable("Q")); - final PositiveLiteral pl4 = Expressions.makePositiveLiteral(locatedInPred, Expressions.makeExistentialVariable("Q"), - Expressions.makeUniversalVariable("F")); - final 
PositiveLiteral pl5 = Expressions.makePositiveLiteral(universityPred, varX, - Expressions.makeUniversalVariable("F")); - final Conjunction conjunction = Expressions.makePositiveConjunction(pl3, pl4); - final Rule rule1 = Expressions.makeRule(pl1, pl2); - final Rule rule2 = Expressions.makeRule(conjunction, Expressions.makeConjunction(pl5)); - final Fact f1 = Expressions.makeFact(locatedInPred, Expressions.makeAbstractConstant("Egypt"), - Expressions.makeAbstractConstant("Africa")); - final Fact f2 = Expressions.makeFact(addressPred, Expressions.makeAbstractConstant("TSH"), - Expressions.makeAbstractConstant("Pragerstraße13"), Expressions.makeAbstractConstant("01069"), - dresdenConst); - final Fact f3 = Expressions.makeFact("city", dresdenConst); - final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); - final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), - Expressions.makeAbstractConstant("germany")); - final InMemoryDataSource locations = new InMemoryDataSource(2, 1); - - @Test - public void testWriteInferences() throws IOException { - KnowledgeBase kb = new KnowledgeBase(); - kb.addStatement(fact); - kb.addStatements(rule1, rule2, f1, f2, f3, f4, f5); - locations.addTuple("dresden", "germany"); - kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); - List inferences = new ArrayList(); - try (final Reasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - reasoner.writeInferences(stream); - stream.flush(); - try (BufferedReader input = new BufferedReader(new StringReader(stream.toString()))) { - String factString = ""; - while ((factString = input.readLine()) != null) { - inferences.add(factString); - } - - } - assertEquals(10, inferences.size()); - } - - } -} From 43d4ec3b19c274a396d71933e6fc824cd70bca4c Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 
18 Feb 2020 15:49:52 +0100 Subject: [PATCH 0811/1255] Core: Fix VLogReasonerWriteInferenceTest on Java 8 Java 8 regexes do not allow unbounded lookbehind. --- .../implementation/VLogReasonerWriteInferencesTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 4ef6212c1..036335328 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -125,7 +125,7 @@ private List getInferences() throws IOException { reasoner.writeInferences(stream); stream.flush(); - Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s*)\\.\\s*")); + Stream inferences = Arrays.stream(stream.toString().split("(?<=[>)]\\s?)\\.\\s*")); return inferences.map((String inference) -> inference + ".").collect(Collectors.toList()); } From 5757c2b3ce0ce0c97750ae952927d588e870a24d Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 21 Feb 2020 14:57:40 +0100 Subject: [PATCH 0812/1255] Core: Simplify sorting in MergeablePrefixDeclarations --- .../model/implementation/MergeablePrefixDeclarations.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java index dbea5e454..6365a77a9 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java @@ -22,6 +22,7 @@ import java.net.URI; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -116,10 +117,8 @@ public String unresolveAbsoluteIri(String iri) { }); List matchesByLength = new ArrayList<>(matches.keySet()); - matchesByLength.sort((left, right) -> { - // inverse order, so we get the longest match first - return matches.get(right).compareTo(matches.get(left)); - }); + // reverse order, so we get the longest match first + matchesByLength.sort(Comparator.comparing(matches::get).reversed()); if (matchesByLength.size() > 0) { return matchesByLength.get(0); From 2e52f864a5b63108c269071c03af5e1c743ac1e4 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 28 Feb 2020 19:10:50 +0100 Subject: [PATCH 0813/1255] Core: Refactor handling of prefix declarations --- .../PrefixDeclarationException.java | 3 - .../core/exceptions/VLog4jException.java | 11 +- .../model/api/LanguageStringConstant.java | 2 +- ...ns.java => PrefixDeclarationRegistry.java} | 49 ++++- .../AbstractPrefixDeclarationRegistry.java | 94 +++++++++ .../MergeablePrefixDeclarations.java | 183 ------------------ .../MergingPrefixDeclarationRegistry.java | 151 +++++++++++++++ .../core/model/implementation/Serializer.java | 36 ++-- .../vlog4j/core/reasoner/KnowledgeBase.java | 26 +-- ...MergingPrefixDeclarationRegistryTest.java} | 109 +++++------ .../vlog4j/core/model/TermImplTest.java | 6 +- .../core/reasoner/KnowledgeBaseTest.java | 8 +- .../VLogReasonerWriteInferencesTest.java | 23 ++- .../vlog4j/parser/DirectiveHandler.java | 8 +- .../LocalPrefixDeclarationRegistry.java | 94 +++++++++ .../parser/LocalPrefixDeclarations.java | 118 ----------- .../vlog4j/parser/ParserConfiguration.java | 4 +- .../vlog4j/parser/ParsingException.java | 15 +- .../semanticweb/vlog4j/parser/RuleParser.java | 8 +- 
.../ImportFileRelativeDirectiveHandler.java | 8 +- .../vlog4j/parser/javacc/JavaCCParser.jj | 8 +- .../parser/javacc/JavaCCParserBase.java | 26 +-- .../parser/javacc/SubParserFactory.java | 8 +- .../parser/RuleParserParseFactTest.java | 6 +- .../vlog4j/parser/RuleParserTest.java | 32 +-- .../vlog4j/rdf/RdfValueToTermConverter.java | 4 +- .../vlog4j/rdf/TestConvertRdfFileToFacts.java | 4 +- 27 files changed, 550 insertions(+), 494 deletions(-) rename vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/{PrefixDeclarations.java => PrefixDeclarationRegistry.java} (58%) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java delete mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java rename vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/{MergeablePrefixDeclarationsTest.java => MergingPrefixDeclarationRegistryTest.java} (54%) create mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java delete mode 100644 vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index 7c209c5cf..78cb523c1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -21,9 +21,6 @@ */ public class PrefixDeclarationException extends VLog4jException { - /** - * - */ private static final long serialVersionUID = 1L; public PrefixDeclarationException(String 
errorMessage) { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java index efcb1bf29..c87c6ca3b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -32,6 +32,10 @@ public class VLog4jException extends Exception { */ private static final long serialVersionUID = 8305375071519734590L; + public VLog4jException(Throwable cause) { + super(cause); + } + public VLog4jException(String message, Throwable cause) { super(message, cause); } @@ -40,4 +44,7 @@ public VLog4jException(String message) { super(message); } + public VLog4jException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java index c38b0f901..0f1296d0f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java @@ -46,7 +46,7 @@ default TermType getType() { * @return a IRI of RDF langString datatype */ default String getDatatype() { - return 
PrefixDeclarations.RDF_LANGSTRING; + return PrefixDeclarationRegistry.RDF_LANGSTRING; } /** diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java similarity index 58% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java rename to vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java index 942c10a60..ae4c1f6c4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarations.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.model.api; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; - /*- * #%L * vlog4j-syntax @@ -22,13 +20,17 @@ * #L% */ +import java.util.Map.Entry; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; + /** * Registry that manages prefixes and base namespace declarations as used for * parsing and serialising inputs. * * @author Markus Kroetzsch */ -public interface PrefixDeclarations extends Iterable { +public interface PrefixDeclarationRegistry extends Iterable> { static final String XSD = "http://www.w3.org/2001/XMLSchema#"; static final String XSD_STRING = "http://www.w3.org/2001/XMLSchema#string"; @@ -39,28 +41,57 @@ public interface PrefixDeclarations extends Iterable { static final String XSD_BOOLEAN = "http://www.w3.org/2001/XMLSchema#boolean"; static final String RDF_LANGSTRING = "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"; + static final String EMPTY_BASE = ""; + static final String PREFIX_NAME_SEPARATOR = ":"; + /** * Returns the relevant base namespace. This should always return a result, * possibly using a local default value if no base was declared. 
* * @return string of an absolute base IRI */ - String getBase(); + String getBaseIri(); /** * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. * - * @param base the new base namespace + * @param baseIri the new base namespace * @throws PrefixDeclarationException if base was already defined */ - void setBase(String base) throws PrefixDeclarationException; + void setBaseIri(String baseIri) throws PrefixDeclarationException; - String getPrefix(String prefix) throws PrefixDeclarationException; + /** + * Returns the IRI associated with a given prefix name. + * + * @param prefixName the name of the prefix. + * @throws PrefixDeclarationException if prefixName was not defined. + */ + String getPrefixIri(String prefixName) throws PrefixDeclarationException; - void setPrefix(String prefix, String iri) throws PrefixDeclarationException; + /** + * Registers a prefix declaration. Behaviour is implementation-defined if + * prefixName has already been registered. + * + * @param prefixName the name of the prefix. + * @param prefixIri the IRI of the prefix. + * + * @throws PrefixDeclarationException when prefixName is already registered, at + * the discretion of the implementation. + */ + void setPrefixIri(String prefixName, String prefixIri) throws PrefixDeclarationException; + /** + * Turn a prefixed name into an absolute IRIna. + */ String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; - String absolutize(String prefixedName) throws PrefixDeclarationException; + /** + * Turn a prefixed name or a potentially relative IRI into an absolute IRI. + * + * @param prefixedNameOrIri either a prefixedName or an IRI. + * @throws PrefixDeclarationException when called on a prefixedName using an + * unknown prefixName. 
+ */ + String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarationException; } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java new file mode 100644 index 000000000..d41bfabfb --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -0,0 +1,94 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; + +/** + * Implementation of the common logic for prefix declaration registries. + * + * @author Maximilian Marx + */ +public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclarationRegistry { + /** + * Map associating each prefixName with the full prefixIri. + */ + protected Map prefixes = new HashMap<>(); + + /** + * Iri holding the base namespace. 
+ */ + protected String baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + + @Override + public String getBaseIri() { + return baseUri; + } + + @Override + public String getPrefixIri(String prefixName) throws PrefixDeclarationException { + if (!prefixes.containsKey(prefixName)) { + throw new PrefixDeclarationException( + "Prefix \"" + prefixName + "\" cannot be resolved (not declared yet)."); + } + + return prefixes.get(prefixName); + } + + @Override + public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { + int colon = prefixedName.indexOf(":"); + String prefix = prefixedName.substring(0, colon + 1); + String suffix = prefixedName.substring(colon + 1); + + return getPrefixIri(prefix) + suffix; + } + + @Override + public String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarationException { + URI relative; + + try { + relative = new URI(potentiallyRelativeIri); + } catch (URISyntaxException e) { + throw new PrefixDeclarationException("Failed to parse IRI", e); + } + + if (relative.isAbsolute()) { + return potentiallyRelativeIri; + } else { + return getBaseIri() + potentiallyRelativeIri; + } + } + + @Override + public Iterator> iterator() { + return this.prefixes.entrySet().iterator(); + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java deleted file mode 100644 index 6365a77a9..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergeablePrefixDeclarations.java +++ /dev/null @@ -1,183 +0,0 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -/*- - * #%L - * vlog4j-parser - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.net.URI; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; - -/** - * Implementation of {@link PrefixDeclarations} that is suitable for - * incrementally parsing from multiple sources. When trying to merge in - * conflicting prefix declarations, a fresh non-conflicting prefix is generated - * instead. 
- * - * @author Maximilian Marx - */ -final public class MergeablePrefixDeclarations implements PrefixDeclarations { - private Map prefixes = new HashMap<>(); - - private String baseUri = EMPTY_BASE_PREFIX; - private long nextIndex = 0; - - private static final String EMPTY_BASE_PREFIX = ""; - private static final String GENERATED_PREFIX_PREFIX = "vlog4j_generated_"; - - public MergeablePrefixDeclarations() { - } - - public MergeablePrefixDeclarations(final PrefixDeclarations prefixDeclarations) { - super(); - mergePrefixDeclarations(prefixDeclarations); - } - - @Override - public String getBase() { - return baseUri; - } - - @Override - public void setBase(String base) { - if (base != this.baseUri && this.baseUri != EMPTY_BASE_PREFIX) { - prefixes.put(getFreshPrefix(), this.baseUri); - } - - this.baseUri = base; - } - - @Override - public String getPrefix(String prefix) throws PrefixDeclarationException { - if (!prefixes.containsKey(prefix)) { - throw new PrefixDeclarationException("Prefix \"" + prefix + "\" cannot be resolved (not declared yet)."); - } - return prefixes.get(prefix); - } - - @Override - public void setPrefix(String prefix, String iri) { - String prefixName = prefixes.containsKey(prefix) ? getFreshPrefix() : prefix; - prefixes.put(prefixName, iri); - } - - @Override - public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - int colon = prefixedName.indexOf(":"); - String prefix = prefixedName.substring(0, colon + 1); - String suffix = prefixedName.substring(colon + 1); - - return getPrefix(prefix) + suffix; - } - - /** - * Turn an absolute Iri into a (possibly) prefixed name. Dual to - * {@link resolvePrefixedName}. - * - * @param iri an absolute Iri to abbreviate. - * - * @return an abbreviated form of {@code iri} if an appropriate prefix is known, - * or {@code iri}. 
- */ - public String unresolveAbsoluteIri(String iri) { - Map matches = new HashMap<>(); - - if (baseUri != EMPTY_BASE_PREFIX && iri.startsWith(baseUri) && !iri.equals(baseUri)) { - matches.put(iri.replaceFirst(baseUri, ""), baseUri.length()); - } - - prefixes.forEach((prefixName, prefixIri) -> { - // only select proper prefixes here, since `eg:` is not a valid prefixed name. - if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { - matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); - } - }); - - List matchesByLength = new ArrayList<>(matches.keySet()); - // reverse order, so we get the longest match first - matchesByLength.sort(Comparator.comparing(matches::get).reversed()); - - if (matchesByLength.size() > 0) { - return matchesByLength.get(0); - } else { - // no matching prefix - return iri; - } - } - - @Override - public String absolutize(String iri) { - URI relative = URI.create(iri); - - if (relative.isAbsolute()) { - return iri; - } else { - return getBase() + iri; - } - } - - @Override - public Iterator iterator() { - return this.prefixes.keySet().iterator(); - } - - /** - * Merge another set of prefix declarations. - * - * @param other the set of prefix declarations to merge. Conflicting prefixes - * will be renamed. - * - * @return this - */ - public MergeablePrefixDeclarations mergePrefixDeclarations(final PrefixDeclarations other) { - this.setBase(other.getBase()); - - for (String prefixName : other) { - String iri; - try { - iri = other.getPrefix(prefixName); - } catch (PrefixDeclarationException e) { - // this shouldn't throw, since we already know that prefix is defined. 
- throw new RuntimeException(e); - } - - this.prefixes.put(prefixName, iri); - } - - return this; - } - - private String getFreshPrefix() { - for (long idx = nextIndex; true; ++idx) { - String freshPrefix = GENERATED_PREFIX_PREFIX + idx + ":"; - - if (!prefixes.containsKey(freshPrefix)) { - this.nextIndex = idx + 1; - return freshPrefix; - } - } - } -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java new file mode 100644 index 000000000..ac832aae5 --- /dev/null +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -0,0 +1,151 @@ +package org.semanticweb.vlog4j.core.model.implementation; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; + +/** + * Implementation of {@link PrefixDeclarationRegistry} that is suitable for + * incrementally parsing from multiple sources. 
When trying to merge in + * conflicting prefix declarations, a fresh non-conflicting prefix is generated + * instead. + * + * @author Maximilian Marx + */ +final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclarationRegistry { + /** + * Next index to use for generated prefix names. + */ + private long nextIndex = 0; + + /** + * Prefix string to use for generated prefix name + */ + private static final String GENERATED_PREFIX_PREFIX_STRING = "vlog4j_generated_"; + + public MergingPrefixDeclarationRegistry() { + super(); + } + + public MergingPrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDeclarations) { + super(); + mergePrefixDeclarations(prefixDeclarations); + } + + /** + * Sets the base namespace to the given value. If a base Iri has already been + * set, it will be added as a prefix declaration with a fresh prefixName. + * + * @param baseIri the new base namespace. + */ + @Override + public void setBaseIri(String baseIri) { + if (baseIri != this.baseUri && this.baseUri != PrefixDeclarationRegistry.EMPTY_BASE) { + prefixes.put(getFreshPrefix(), this.baseUri); + } + + this.baseUri = baseIri; + } + + /** + * Registers a prefix declaration. If prefixName is already registered, a + * freshly generated name will be used instead. + * + * @param prefixName the name of the prefix. + * @param prefixIri the IRI of the prefix. + */ + @Override + public void setPrefixIri(String prefixName, String prefixIri) { + String name = prefixes.containsKey(prefixName) ? getFreshPrefix() : prefixName; + prefixes.put(name, prefixIri); + } + + /** + * Turn an absolute Iri into a (possibly) prefixed name. Dual to + * {@link AbstractPrefixDeclarationRegistry#resolvePrefixedName}. + * + * @param iri an absolute Iri to abbreviate. + * + * @return an abbreviated form of {@code iri} if an appropriate prefix is known, + * or {@code iri}. 
+ */ + public String unresolveAbsoluteIri(String iri) { + Map matches = new HashMap<>(); + + if (baseUri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseUri) && !iri.equals(baseUri)) { + matches.put(iri.replaceFirst(baseUri, PrefixDeclarationRegistry.EMPTY_BASE), baseUri.length()); + } + + prefixes.forEach((prefixName, prefixIri) -> { + // only select proper prefixes here, since `eg:` is not a valid prefixed name. + if (iri.startsWith(prefixIri) && !iri.equals(prefixIri)) { + matches.put(iri.replaceFirst(prefixIri, prefixName), prefixIri.length()); + } + }); + + List matchesByLength = new ArrayList<>(matches.keySet()); + // reverse order, so we get the longest match first + matchesByLength.sort(Comparator.comparing(matches::get).reversed()); + + if (matchesByLength.size() > 0) { + return matchesByLength.get(0); + } else { + // no matching prefix + return iri; + } + } + + /** + * Merge another set of prefix declarations. + * + * @param other the set of prefix declarations to merge. Conflicting prefixes + * from {@code other} will be renamed. 
+ * + * @return this + */ + public MergingPrefixDeclarationRegistry mergePrefixDeclarations(final PrefixDeclarationRegistry other) { + this.setBaseIri(other.getBaseIri()); + + for (Entry prefix : other) { + setPrefixIri(prefix.getKey(), prefix.getValue()); + } + + return this; + } + + private String getFreshPrefix() { + for (long idx = nextIndex; true; ++idx) { + String freshPrefix = GENERATED_PREFIX_PREFIX_STRING + idx + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR; + + if (!prefixes.containsKey(freshPrefix)) { + this.nextIndex = idx + 1; + return freshPrefix; + } + } + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index 8d0325fd1..ea03e05c3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -1,6 +1,7 @@ package org.semanticweb.vlog4j.core.model.implementation; import java.util.List; +import java.util.Map.Entry; import java.util.function.Function; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; @@ -36,7 +37,7 @@ import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; @@ -157,7 +158,7 @@ public static String getFactString(final Fact fact) { * Creates a String representation of a given {@link Constant}. * * @see Rule syntax . 
- * @param constant a {@link Constant} + * @param constant a {@link Constant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given {@link Constant}. */ @@ -195,16 +196,16 @@ public static String getConstantName(final LanguageStringConstant languageString * * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} - * @param iriTransformer a function to transform IRIs with. + * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given * {@link DatatypeConstant}. */ public static String getString(final DatatypeConstant datatypeConstant, Function iriTransformer) { - if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_STRING)) { + if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_STRING)) { return getString(datatypeConstant.getLexicalValue()); - } else if (datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DECIMAL) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_INTEGER) - || datatypeConstant.getDatatype().equals(PrefixDeclarations.XSD_DOUBLE)) { + } else if (datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DECIMAL) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) + || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { return datatypeConstant.getLexicalValue(); } else { return getConstantName(datatypeConstant, iriTransformer); @@ -233,9 +234,10 @@ public static String getString(final DatatypeConstant datatypeConstant) { * @return String representation corresponding to a given * {@link DatatypeConstant}. 
*/ - public static String getConstantName(final DatatypeConstant datatypeConstant, Function iriTransformer) { + public static String getConstantName(final DatatypeConstant datatypeConstant, + Function iriTransformer) { return getString(datatypeConstant.getLexicalValue()) + DOUBLE_CARET - + getIRIString(datatypeConstant.getDatatype(), iriTransformer); + + getIRIString(datatypeConstant.getDatatype(), iriTransformer); } /** @@ -470,26 +472,18 @@ public static String getBaseString(KnowledgeBase knowledgeBase) { return ""; } - return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + "\n"; + return BASE + addAngleBrackets(baseIri) + STATEMENT_SEPARATOR + NEW_LINE; } - public static String getPrefixString(String prefixName, String prefixIri) { - return PREFIX + prefixName + " " + addAngleBrackets(prefixIri) + STATEMENT_SEPARATOR + "\n"; + public static String getPrefixString(Entry prefix) { + return PREFIX + prefix.getKey() + " " + addAngleBrackets(prefix.getValue()) + STATEMENT_SEPARATOR + NEW_LINE; } public static String getBaseAndPrefixDeclarations(KnowledgeBase knowledgeBase) { StringBuilder sb = new StringBuilder(); sb.append(getBaseString(knowledgeBase)); - - knowledgeBase.getPrefixes().forEachRemaining((String prefixName) -> { - try { - sb.append(getPrefixString(prefixName, knowledgeBase.getPrefix(prefixName))); - } catch (PrefixDeclarationException e) { - // this shouldn't throw, since we're iterating over known prefixes. 
- throw new RuntimeException(e); - } - }); + knowledgeBase.getPrefixes().forEachRemaining(prefix -> sb.append(getPrefixString(prefix))); return sb.toString(); } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java index 92d3b779c..da8211d95 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java @@ -14,18 +14,20 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.Map.Entry; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.exceptions.VLog4jException; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; /*- * #%L @@ -174,7 +176,7 @@ public Void visit(final DataSourceDeclaration statement) { * base. We try to preserve user-provided prefixes found in files when loading * data. 
*/ - private MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); + private MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); /** * Index structure that organises all facts by their predicate. @@ -450,12 +452,12 @@ Map> getFactsByPredicate() { * This is essentially * {@link org.semanticweb.vlog4j.parser.RuleParser#parseInto}, but we need to * avoid a circular dependency here -- this is also why we throw - * {@link Exception} instead of + * {@link VLog4jException} instead of * {@link org.semanticweb.vlog4j.parser.ParsingException}. */ @FunctionalInterface public interface AdditionalInputParser { - KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, Exception; + KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; } /** @@ -468,12 +470,12 @@ public interface AdditionalInputParser { * @throws IOException when reading {@code file} fails * @throws IllegalArgumentException when {@code file} is null or has already * been imported - * @throws RuntimeException when parseFunction throws + * @throws VLog4jException when parseFunction throws VLog4jException * * @return this */ public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunction) - throws Exception, IOException, IllegalArgumentException { + throws VLog4jException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); @@ -485,7 +487,7 @@ public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunct } /** - * Merge {@link PrefixDeclarations} into this knowledge base. + * Merge {@link PrefixDeclarationRegistry} into this knowledge base. * * @param prefixDeclarations the prefix declarations to merge. 
Conflicting * prefix names in {@code prefixDeclarations} will be @@ -493,7 +495,7 @@ public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunct * * @return this */ - public KnowledgeBase mergePrefixDeclarations(PrefixDeclarations prefixDeclarations) { + public KnowledgeBase mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarations) { this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); return this; @@ -505,7 +507,7 @@ public KnowledgeBase mergePrefixDeclarations(PrefixDeclarations prefixDeclaratio * @return the base IRI, if declared, or {@code ""} otherwise. */ public String getBase() { - return this.prefixDeclarations.getBase(); + return this.prefixDeclarations.getBaseIri(); } /* @@ -513,7 +515,7 @@ public String getBase() { * * @return an iterator over all known prefixes. */ - public Iterator getPrefixes() { + public Iterator> getPrefixes() { return this.prefixDeclarations.iterator(); } @@ -528,7 +530,7 @@ public Iterator getPrefixes() { * @return the declared IRI for {@code prefixName}. 
*/ public String getPrefix(String prefixName) throws PrefixDeclarationException { - return this.prefixDeclarations.getPrefix(prefixName); + return this.prefixDeclarations.getPrefixIri(prefixName); } /* diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java similarity index 54% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java rename to vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java index f515cd6db..de313a6a1 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergeablePrefixDeclarationsTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java @@ -20,19 +20,15 @@ * #L% */ -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - -import java.util.Arrays; +import static org.junit.Assert.assertEquals; import org.junit.Before; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; -import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; -public class MergeablePrefixDeclarationsTest { - private MergeablePrefixDeclarations prefixDeclarations; +public class MergingPrefixDeclarationRegistryTest { + private MergingPrefixDeclarationRegistry prefixDeclarations; private static final String BASE = "https://example.org/"; private static final String UNRELATED = "https://example.com/"; @@ -42,39 +38,39 @@ public class MergeablePrefixDeclarationsTest { @Before public void init() { - prefixDeclarations = new MergeablePrefixDeclarations(); + prefixDeclarations = new 
MergingPrefixDeclarationRegistry(); } @Test - public void setBase_changingBase_succeeds() { - prefixDeclarations.setBase(BASE); - assertEquals(BASE, prefixDeclarations.getBase()); - prefixDeclarations.setBase(MORE_SPECIFIC); - assertEquals(MORE_SPECIFIC, prefixDeclarations.getBase()); + public void setBaseIri_changingBase_succeeds() { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); + prefixDeclarations.setBaseIri(MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getBaseIri()); } @Test - public void setBase_redeclareSameBase_succeeds() { - prefixDeclarations.setBase(BASE); - assertEquals(BASE, prefixDeclarations.getBase()); - prefixDeclarations.setBase(BASE); - assertEquals(BASE, prefixDeclarations.getBase()); + public void setBaseIri_redeclareSameBase_succeeds() { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); } @Test - public void absolutize_noBase_identical() { - assertEquals(RELATIVE, prefixDeclarations.absolutize(RELATIVE)); + public void absolutizeIri_noBase_identical() throws PrefixDeclarationException { + assertEquals(RELATIVE, prefixDeclarations.absolutizeIri(RELATIVE)); } @Test - public void absolutize_base_absoluteIri() { - prefixDeclarations.setBase(BASE); - assertEquals(BASE + RELATIVE, prefixDeclarations.absolutize(RELATIVE)); + public void absolutizeIri_base_absoluteIri() throws PrefixDeclarationException { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE + RELATIVE, prefixDeclarations.absolutizeIri(RELATIVE)); } @Test - public void absolutize_absoluteIri_identical() { - assertEquals(BASE, prefixDeclarations.absolutize(BASE)); + public void absolutizeIri_absoluteIri_identical() throws PrefixDeclarationException { + assertEquals(BASE, prefixDeclarations.absolutizeIri(BASE)); } @Test(expected = PrefixDeclarationException.class) @@ -84,51 +80,42 
@@ public void resolvePrefixedName_undeclaredPrefix_throws() throws PrefixDeclarati @Test public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals(BASE + RELATIVE, prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE)); } @Test public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws PrefixDeclarationException { String prefix = "eg:"; - prefixDeclarations.setPrefix(prefix, BASE); + prefixDeclarations.setPrefixIri(prefix, BASE); String resolved = BASE + RELATIVE; String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved); assertEquals(resolved, prefixDeclarations.resolvePrefixedName(unresolved)); } @Test - public void setPrefix_redeclarePrefix_succeeds() throws PrefixDeclarationException { - prefixDeclarations.setPrefix("eg:", BASE); - prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); - assertEquals(BASE, prefixDeclarations.getPrefix("eg:")); + public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); } @Test public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { String prefix = "vlog4j_generated_"; - prefixDeclarations.setPrefix(prefix + "0:", BASE + "generated/"); - prefixDeclarations.setPrefix("eg:", BASE); - prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); + prefixDeclarations.setPrefixIri(prefix + "0:", BASE + "generated/"); + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); - assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix(prefix + "1:")); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefixIri(prefix + "1:")); } @Test - public void 
mergeablePrefixDeclarations_constructor_succeeds() throws PrefixDeclarationException { - this.prefixDeclarations.setPrefix("eg:", MORE_SPECIFIC); - MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(this.prefixDeclarations); - assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefix("eg:")); - } - - @Test(expected = RuntimeException.class) - public void mergePrefixDeclarations_getPrefixUnexpectedlyThrows_throws() throws PrefixDeclarationException { - PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); - - when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:", "ex:").iterator()); - when(prefixDeclarations.getPrefix(anyString())).thenThrow(PrefixDeclarationException.class); - - this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + public void mergingPrefixDeclarationRegistry_constructor_succeeds() throws PrefixDeclarationException { + this.prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry( + this.prefixDeclarations); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefixIri("eg:")); } @Test @@ -139,49 +126,49 @@ public void unresolveAbsoluteIri_default_identical() { @Test public void unresolveAbsoluteIri_declaredPrefix_succeeds() { assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedPrefix_identical() { - prefixDeclarations.setPrefix("eg:", UNRELATED); + prefixDeclarations.setPrefixIri("eg:", UNRELATED); assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { - prefixDeclarations.setPrefix("ex:", UNRELATED); - 
prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("ex:", UNRELATED); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC)); } @Test public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { - prefixDeclarations.setPrefix("eg:", BASE); - prefixDeclarations.setPrefix("ex:", MORE_SPECIFIC); + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("ex:", MORE_SPECIFIC); assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); - prefixDeclarations.setPrefix("er:", EVEN_MORE_SPECIFIC); + prefixDeclarations.setPrefixIri("er:", EVEN_MORE_SPECIFIC); assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test public void unresolveAbsoluteIri_exactPrefixMatch_identical() { - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); } @Test public void unresolveAbsoluteIri_baseIsMoreSpecific_baseWins() { - prefixDeclarations.setBase(MORE_SPECIFIC); - prefixDeclarations.setPrefix("eg:", BASE); + prefixDeclarations.setBaseIri(MORE_SPECIFIC); + prefixDeclarations.setPrefixIri("eg:", BASE); assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE)); } @Test public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws PrefixDeclarationException { String prefix = "eg:"; - prefixDeclarations.setPrefix(prefix, BASE); + prefixDeclarations.setPrefixIri(prefix, BASE); String unresolved = prefix + RELATIVE; String resolved = prefixDeclarations.resolvePrefixedName(unresolved); assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java 
b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java index aed7efd89..5093d5116 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java @@ -26,7 +26,7 @@ import org.junit.Test; import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.TermType; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -97,7 +97,7 @@ public void abstractConstantGetterTest() { @Test public void datatypeConstantGetterTest() { - DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); + DatatypeConstant c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); assertEquals("c", c.getLexicalValue()); assertEquals("http://www.w3.org/2001/XMLSchema#string", c.getDatatype()); assertEquals("\"c\"^^", c.getName()); @@ -142,7 +142,7 @@ public void abstractConstantToStringTest() { @Test public void datatypeConstantToStringTest() { - DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarations.XSD_STRING); + DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING); assertEquals("\"c\"", c.toString()); } diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index 29663ac17..ce862f09b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -30,7 +30,7 @@ import 
org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.MergeablePrefixDeclarations; +import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; public class KnowledgeBaseTest { @@ -94,7 +94,7 @@ public void testDoRemoveStatementInexistentPredicate() { @Test public void getBase_default_hasEmptyBase() { - assertEquals(this.kb.getBase(), ""); + assertEquals("", this.kb.getBase()); } @Test(expected = PrefixDeclarationException.class) @@ -110,8 +110,8 @@ public void resolvePrefixedName_defaultUndeclaredPrefix_throws() throws PrefixDe @Test public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationException { String iri = "https://example.org/"; - MergeablePrefixDeclarations prefixDeclarations = new MergeablePrefixDeclarations(); - prefixDeclarations.setPrefix("ex:", iri); + MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setPrefixIri("ex:", iri); this.kb.mergePrefixDeclarations(prefixDeclarations); assertEquals(this.kb.getPrefix("ex:"), iri); assertEquals(this.kb.resolvePrefixedName("ex:test"), iri + "test"); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 036335328..6dff4c70f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -6,12 +6,17 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import 
java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Map.Entry; import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.Before; import org.junit.Test; +import org.mockito.internal.util.collections.Sets; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; import org.semanticweb.vlog4j.core.model.api.Conjunction; @@ -19,7 +24,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.UniversalVariable; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -97,10 +102,12 @@ public void writeInferences_example_succeeds() throws IOException { @Test public void writeInferences_withPrefixDeclarations_abbreviatesIris() throws IOException, PrefixDeclarationException { - PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); - when(prefixDeclarations.getBase()).thenReturn(""); - when(prefixDeclarations.getPrefix(eq("eg:"))).thenReturn("http://example.org/"); - when(prefixDeclarations.iterator()).thenReturn(Arrays.asList("eg:").iterator()); + PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); + Map prefixMap = new HashMap<>(); + prefixMap.put("eg:", "http://example.org/"); + when(prefixDeclarations.getBaseIri()).thenReturn(""); + when(prefixDeclarations.getPrefixIri(eq("eg:"))).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(prefixMap.entrySet().iterator()); kb.mergePrefixDeclarations(prefixDeclarations); assertEquals(11, getInferences().size()); @@ -109,9 +116,9 @@ public 
void writeInferences_withPrefixDeclarations_abbreviatesIris() @Test public void writeInferences_withBase_writesBase() throws IOException, PrefixDeclarationException { - PrefixDeclarations prefixDeclarations = mock(PrefixDeclarations.class); - when(prefixDeclarations.getBase()).thenReturn("http://example.org/"); - when(prefixDeclarations.iterator()).thenReturn(Arrays.asList().iterator()); + PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); + when(prefixDeclarations.getBaseIri()).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(new HashMap().entrySet().iterator()); kb.mergePrefixDeclarations(prefixDeclarations); assertEquals(11, getInferences().size()); diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java index b4c9cb4b3..69d772f70 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java @@ -28,7 +28,7 @@ import java.util.List; import java.util.NoSuchElementException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; @@ -207,15 +207,15 @@ default ParserConfiguration getParserConfiguration(SubParserFactory subParserFac } /** - * Obtain {@link PrefixDeclarations} from a {@link SubParserFactory}. + * Obtain {@link PrefixDeclarationRegistry} from a {@link SubParserFactory}. * * @param subParserFactory the SubParserFactory. * * @return the prefix declarations. 
*/ - default PrefixDeclarations getPrefixDeclarations(SubParserFactory subParserFactory) { + default PrefixDeclarationRegistry getPrefixDeclarationRegistry(SubParserFactory subParserFactory) { JavaCCParser subParser = subParserFactory.makeSubParser(""); - return subParser.getPrefixDeclarations(); + return subParser.getPrefixDeclarationRegistry(); } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java new file mode 100644 index 000000000..42e014cae --- /dev/null +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java @@ -0,0 +1,94 @@ +package org.semanticweb.vlog4j.parser; + +/*- + * #%L + * vlog4j-syntax + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.vlog4j.core.model.implementation.AbstractPrefixDeclarationRegistry; + +/** + * Implementation of {@link PrefixDeclarationRegistry} that is used when parsing + * data from a single source. In this case, attempts to re-declare prefixes or + * the base IRI will lead to errors. 
+ * + * @author Markus Kroetzsch + * + */ +final public class LocalPrefixDeclarationRegistry extends AbstractPrefixDeclarationRegistry { + + /** + * Fallback IRI to use as base IRI if none is set. + */ + private String fallbackIri; + + public LocalPrefixDeclarationRegistry() { + this(PrefixDeclarationRegistry.EMPTY_BASE); // empty string encodes: "no base" (use relative IRIs) + } + + /** + * + */ + public LocalPrefixDeclarationRegistry(String fallbackIri) { + super(); + this.fallbackIri = fallbackIri; + this.baseUri = null; + } + + /** + * Returns the relevant base namespace. Returns the fallback IRI if no base + * namespace has been set yet. + * + * @return string of an absolute base IRI + */ + @Override + public String getBaseIri() { + if (this.baseUri == null) { + this.baseUri = this.fallbackIri; + } + return baseUri.toString(); + } + + @Override + public void setPrefixIri(String prefixName, String prefixIri) throws PrefixDeclarationException { + if (prefixes.containsKey(prefixName)) { + throw new PrefixDeclarationException("Prefix \"" + prefixName + "\" is already defined as <" + + prefixes.get(prefixName) + ">. It cannot be redefined to mean <" + prefixIri + ">."); + } + + prefixes.put(prefixName, prefixIri); + } + + /** + * Sets the base namespace to the given value. This should only be done once, + * and not after the base namespace was assumed to be an implicit default value. 
+ * + * @param baseIri the new base namespace + * @throws PrefixDeclarationException if base was already defined + */ + + @Override + public void setBaseIri(String baseUri) throws PrefixDeclarationException { + if (this.baseUri != null) + throw new PrefixDeclarationException( + "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); + this.baseUri = baseUri; + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java deleted file mode 100644 index 8bc8a93b6..000000000 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarations.java +++ /dev/null @@ -1,118 +0,0 @@ -package org.semanticweb.vlog4j.parser; - -/*- - * #%L - * vlog4j-parser - * %% - * Copyright (C) 2018 - 2019 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; - -/** - * Implementation of {@link PrefixDeclarations} that is used when parsing data - * from a single source. In this case, attempts to re-declare prefixes or the - * base IRI will lead to errors. 
- * - * @author Markus Kroetzsch - * - */ -final public class LocalPrefixDeclarations implements PrefixDeclarations { - - Map prefixes = new HashMap<>(); - String baseUri; - String fallbackUri; - - public LocalPrefixDeclarations() { - this(""); // empty string encodes: "no base" (use relative IRIs) - } - - public LocalPrefixDeclarations(String fallbackUri) { - this.fallbackUri = fallbackUri; - } - - public String getBase() { - if (this.baseUri == null) { - this.baseUri = this.fallbackUri; - } - return baseUri.toString(); - } - - public String getPrefix(String prefix) throws PrefixDeclarationException { - if (!prefixes.containsKey(prefix)) { - throw new PrefixDeclarationException("Prefix " + prefix + " cannot be resolved (not declared yet)."); - } - return prefixes.get(prefix).toString(); - } - - public void setPrefix(String prefix, String uri) throws PrefixDeclarationException { - if (prefixes.containsKey(prefix)) { - throw new PrefixDeclarationException("Prefix " + prefix + " is already defined as <" + prefixes.get(prefix) - + ">. 
It cannot be redefined to mean <" + uri + ">."); - } - - prefixes.put(prefix, uri); - } - - public void setBase(String baseUri) throws PrefixDeclarationException { - if (this.baseUri != null) - throw new PrefixDeclarationException( - "Base is already defined as <" + this.baseUri + "> and cannot be re-defined as " + baseUri); - this.baseUri = baseUri; - } - - public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - // from the parser we know that prefixedName is of the form: - // prefix:something - // remember that the prefixes are stored with the colon symbol - // This does not return the surrounding angle brackets <> - - int idx = prefixedName.indexOf(":") + 1; - String prefix = prefixedName.substring(0, idx); - String suffix = prefixedName.substring(idx); - - return getPrefix(prefix) + suffix; - } - - public String absolutize(String iri) throws PrefixDeclarationException { - URI relative; - - try { - relative = new URI(iri); - } catch (URISyntaxException e) { - throw new PrefixDeclarationException("Failed to parse IRI", e); - } - - if (relative.isAbsolute()) { - return iri; - } else { - return getBase() + iri; - } - } - - @Override - public Iterator iterator() { - return this.prefixes.keySet().iterator(); - } - -} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java index 4015cb83b..861781378 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java @@ -28,7 +28,7 @@ import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import 
org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -134,7 +134,7 @@ public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(final Strin * @return the {@link Constant} corresponding to the given arguments. */ public Constant parseDatatypeConstant(final String lexicalForm, final String datatype) throws ParsingException { - final String type = ((datatype != null) ? datatype : PrefixDeclarations.XSD_STRING); + final String type = ((datatype != null) ? datatype : PrefixDeclarationRegistry.XSD_STRING); final DatatypeConstantHandler handler = this.datatypes.get(type); if (handler != null) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java index 9261af3f9..73625b184 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParsingException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,17 +20,11 @@ * #L% */ -public class ParsingException extends Exception { +import org.semanticweb.vlog4j.core.exceptions.VLog4jException; - /** - * - */ +public class ParsingException extends VLog4jException { private static final long serialVersionUID = 2849123381757026724L; - public ParsingException() { - super(); - } - public ParsingException(String message) { super(message); } @@ -46,5 +40,4 @@ public ParsingException(String message, Throwable cause) { public ParsingException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } - } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index e7784822d..2fab78fd9 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -30,7 +30,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; @@ -61,8 +61,8 @@ public static void parseInto(final KnowledgeBase knowledgeBase, final InputStrea final JavaCCParser parser = new JavaCCParser(stream, encoding); if (baseIri != null) { - PrefixDeclarations prefixDeclarations = new LocalPrefixDeclarations(baseIri); - parser.setPrefixDeclarations(prefixDeclarations); + PrefixDeclarationRegistry prefixDeclarationRegistry = new LocalPrefixDeclarationRegistry(baseIri); + parser.setPrefixDeclarationRegistry(prefixDeclarationRegistry); } 
parser.setKnowledgeBase(knowledgeBase); @@ -261,7 +261,7 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException } KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); - return knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarations()); + return knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarationRegistry()); } protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 680c498e6..2ac3216d1 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -24,7 +24,7 @@ import java.io.InputStream; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.DirectiveHandler; @@ -40,17 +40,17 @@ */ public class ImportFileRelativeDirectiveHandler implements DirectiveHandler { @Override - public KnowledgeBase handleDirective(List arguments, final SubParserFactory subParserFactory) + public KnowledgeBase handleDirective(List arguments, SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - PrefixDeclarations prefixDeclarations = getPrefixDeclarations(subParserFactory); + PrefixDeclarationRegistry prefixDeclarationRegistry = getPrefixDeclarationRegistry(subParserFactory); File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules 
file"); KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory); ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); try { return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { - RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarations.getBase()); + RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarationRegistry.getBaseIri()); return kb; }); } catch (Exception e) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj index 2043e9c7d..1440a9fd2 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParser.jj @@ -60,7 +60,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; @@ -318,9 +318,9 @@ Term term(FormulaContext context) throws PrefixDeclarationException : { Constant NumericLiteral() : { Token t; } { - t = < INTEGER > { return createConstant(t.image, PrefixDeclarations.XSD_INTEGER); } - | t = < DECIMAL > { return createConstant(t.image, PrefixDeclarations.XSD_DECIMAL); } - | t = < DOUBLE > { return createConstant(t.image, PrefixDeclarations.XSD_DOUBLE); } + t = < INTEGER > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_INTEGER); } + | t = < DECIMAL > { return createConstant(t.image, 
PrefixDeclarationRegistry.XSD_DECIMAL); } + | t = < DOUBLE > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_DOUBLE); } } Constant RDFLiteral() throws PrefixDeclarationException : { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index 02e9e35f6..ac977898e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -33,7 +33,7 @@ import org.semanticweb.vlog4j.core.model.api.DataSource; import org.semanticweb.vlog4j.core.model.api.NamedNull; import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; @@ -42,7 +42,7 @@ import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; -import org.semanticweb.vlog4j.parser.LocalPrefixDeclarations; +import org.semanticweb.vlog4j.parser.LocalPrefixDeclarationRegistry; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.ParsingException; @@ -62,7 +62,7 @@ * */ public class JavaCCParserBase { - private PrefixDeclarations prefixDeclarations; + private PrefixDeclarationRegistry prefixDeclarationRegistry; private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; @@ -135,14 +135,14 @@ public enum ConfigurableLiteralDelimiter { public JavaCCParserBase() { this.knowledgeBase = new KnowledgeBase(); - this.prefixDeclarations = new 
LocalPrefixDeclarations(); + this.prefixDeclarationRegistry = new LocalPrefixDeclarationRegistry(); this.parserConfiguration = new DefaultParserConfiguration(); } AbstractConstant createConstant(String lexicalForm) throws ParseException { String absoluteIri; try { - absoluteIri = prefixDeclarations.absolutize(lexicalForm); + absoluteIri = absolutizeIri(lexicalForm); } catch (PrefixDeclarationException e) { throw makeParseExceptionWithCause("Failed to parse IRI", e); } @@ -334,12 +334,12 @@ void setNamedNullNamespace(byte[] namedNullNamespace) { this.namedNullNamespace = namedNullNamespace; } - public void setPrefixDeclarations(PrefixDeclarations prefixDeclarations) { - this.prefixDeclarations = prefixDeclarations; + public void setPrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.prefixDeclarationRegistry = prefixDeclarationRegistry; } - public PrefixDeclarations getPrefixDeclarations() { - return prefixDeclarations; + public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { + return this.prefixDeclarationRegistry; } DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(String syntacticForm, @@ -376,18 +376,18 @@ boolean isParsingOfNamedNullsAllowed() { } void setBase(String baseIri) throws PrefixDeclarationException { - prefixDeclarations.setBase(baseIri); + prefixDeclarationRegistry.setBaseIri(baseIri); } void setPrefix(String prefixName, String baseIri) throws PrefixDeclarationException { - prefixDeclarations.setPrefix(prefixName, baseIri); + prefixDeclarationRegistry.setPrefixIri(prefixName, baseIri); } String absolutizeIri(String iri) throws PrefixDeclarationException { - return prefixDeclarations.absolutize(iri); + return prefixDeclarationRegistry.absolutizeIri(iri); } String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - return prefixDeclarations.resolvePrefixedName(prefixedName); + return prefixDeclarationRegistry.resolvePrefixedName(prefixedName); } } diff --git 
a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 1f1269898..2332afa83 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -24,7 +24,7 @@ import java.io.InputStream; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.RuleParser; @@ -40,7 +40,7 @@ public class SubParserFactory { private final KnowledgeBase knowledgeBase; private final ParserConfiguration parserConfiguration; - private final PrefixDeclarations prefixDeclarations; + private final PrefixDeclarationRegistry prefixDeclarationRegistry; private final byte[] namedNullNamespace; /** @@ -50,7 +50,7 @@ public class SubParserFactory { */ SubParserFactory(final JavaCCParser parser) { this.knowledgeBase = parser.getKnowledgeBase(); - this.prefixDeclarations = parser.getPrefixDeclarations(); + this.prefixDeclarationRegistry = parser.getPrefixDeclarationRegistry(); this.parserConfiguration = parser.getParserConfiguration(); this.namedNullNamespace = parser.getNamedNullNamespace(); } @@ -67,7 +67,7 @@ public class SubParserFactory { public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { final JavaCCParser subParser = new JavaCCParser(inputStream, encoding); subParser.setKnowledgeBase(this.knowledgeBase); - subParser.setPrefixDeclarations(this.prefixDeclarations); + subParser.setPrefixDeclarationRegistry(this.prefixDeclarationRegistry); subParser.setParserConfiguration(this.parserConfiguration); subParser.setNamedNullNamespace(this.namedNullNamespace); diff --git 
a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java index e2a56b1ce..b09d918f9 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserParseFactTest.java @@ -27,7 +27,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.implementation.Expressions; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; @@ -37,8 +37,8 @@ public class RuleParserParseFactTest implements ParserTestUtils { - private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarations.XSD_STRING); - private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarations.XSD_STRING); + private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarationRegistry.XSD_STRING); + private final Constant b = Expressions.makeDatatypeConstant("b", PrefixDeclarationRegistry.XSD_STRING); private final Fact factA = Expressions.makeFact("p", a); private final Fact factAB = Expressions.makeFact("p", a, b); diff --git a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java index 25243d0cb..f6b143182 100644 --- a/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java +++ b/vlog4j-parser/src/test/java/org/semanticweb/vlog4j/parser/RuleParserTest.java @@ -34,7 +34,7 @@ import org.semanticweb.vlog4j.core.model.api.Fact; import 
org.semanticweb.vlog4j.core.model.api.Literal; import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Rule; import org.semanticweb.vlog4j.core.model.api.Statement; import org.semanticweb.vlog4j.core.model.api.Term; @@ -51,7 +51,7 @@ public class RuleParserTest implements ParserTestUtils { private final Variable z = Expressions.makeUniversalVariable("Z"); private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); private final Constant d = Expressions.makeAbstractConstant("http://example.org/d"); - private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarations.XSD_STRING); + private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", x, c); private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", x, c); private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", x, z); @@ -188,24 +188,24 @@ public void testNoDollarVariables() throws ParsingException { public void testIntegerLiteral() throws ParsingException { String input = "p(42)"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @Test public void testAbbreviatedIntegerLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . " + "p(\"42\"^^xsd:integer) ."; + String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(Arrays.asList(integerLiteral), statements); } @Test public void testFullIntegerLiteral() throws ParsingException { - String input = "p(\"42\"^^<" + PrefixDeclarations.XSD_INTEGER + "> )"; + String input = "p(\"42\"^^<" + PrefixDeclarationRegistry.XSD_INTEGER + "> )"; PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("42", PrefixDeclarations.XSD_INTEGER)); + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); assertEquals(integerLiteral, RuleParser.parseLiteral(input)); } @@ -213,7 +213,7 @@ public void testFullIntegerLiteral() throws ParsingException { public void testDecimalLiteral() throws ParsingException { String input = "p(-5.0)"; PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("-5.0", PrefixDeclarations.XSD_DECIMAL)); + Expressions.makeDatatypeConstant("-5.0", PrefixDeclarationRegistry.XSD_DECIMAL)); assertEquals(decimalLiteral, RuleParser.parseLiteral(input)); } @@ -221,7 +221,7 @@ public void testDecimalLiteral() throws ParsingException { public void testDoubleLiteral() throws ParsingException { String input = "p(4.2E9)"; PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarations.XSD_DOUBLE)); + Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarationRegistry.XSD_DOUBLE)); assertEquals(doubleLiteral, RuleParser.parseLiteral(input)); } @@ -241,14 +241,14 @@ public void testIncompleteStringLiteral() throws ParsingException { public void parseLiteral_escapeSequences_succeeds() throws ParsingException { 
String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); } @@ -257,14 +257,14 @@ public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); } @@ -284,14 +284,14 @@ public void parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingE public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException { String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')"; // User input: 
p("a\"b\\c") PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(input)); } @Test public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException { PositiveLiteral fact = Expressions.makePositiveLiteral("p", - Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarations.XSD_STRING)); + Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING)); assertEquals(fact, RuleParser.parseLiteral(fact.getSyntacticRepresentation())); } @@ -322,7 +322,7 @@ public void testUnicodeUri() throws ParsingException { @Test public void testPrefixedLiteral() throws ParsingException { - String input = "@prefix xsd: <" + PrefixDeclarations.XSD + "> . " + "p(\"abc\"^^xsd:string) ."; + String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . 
" + "p(\"abc\"^^xsd:string) ."; ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); assertEquals(Arrays.asList(fact2), statements); } diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java index dfcbc9090..361da7991 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java +++ b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java @@ -26,7 +26,7 @@ import org.openrdf.model.Value; import org.openrdf.model.datatypes.XMLDatatypeUtil; import org.openrdf.rio.ntriples.NTriplesUtil; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; @@ -68,7 +68,7 @@ static Term rdfLiteralToConstant(final Literal literal) { } else if (literal.getLanguage() != null) { return new LanguageStringConstantImpl(literal.getLabel(), literal.getLanguage()); } else { - return new DatatypeConstantImpl(literal.getLabel(), PrefixDeclarations.XSD_STRING); + return new DatatypeConstantImpl(literal.getLabel(), PrefixDeclarationRegistry.XSD_STRING); } } diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java index 1b61bc02c..b5ab85281 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java +++ b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java @@ -41,7 +41,7 @@ import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; import org.semanticweb.vlog4j.core.model.api.NamedNull; -import 
org.semanticweb.vlog4j.core.model.api.PrefixDeclarations; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.api.Constant; import org.semanticweb.vlog4j.core.model.api.Fact; import org.semanticweb.vlog4j.core.model.api.Term; @@ -178,7 +178,7 @@ public void testCollectionsPreserved() throws RDFHandlerException, RDFParseExcep Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file2, fileA, blank1)), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(blank1, RDF_FIRST, - Expressions.makeDatatypeConstant("1", PrefixDeclarations.XSD_INTEGER))), + Expressions.makeDatatypeConstant("1", PrefixDeclarationRegistry.XSD_INTEGER))), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(blank1, RDF_REST, RDF_NIL)), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file3, fileA, blank2)), Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, From 3c32a6b1d7edbe20952864e82e383200aed39f93 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 28 Feb 2020 20:03:43 +0100 Subject: [PATCH 0814/1255] Address review comments --- .../PrefixDeclarationException.java | 2 +- .../core/model/api/ExistentialVariable.java | 7 -- .../model/api/PrefixDeclarationRegistry.java | 2 +- .../vlog4j/core/model/api/Term.java | 2 - .../core/model/api/UniversalVariable.java | 7 -- .../vlog4j/core/model/api/Variable.java | 10 +- .../MergingPrefixDeclarationRegistry.java | 8 +- .../implementation/RenamedNamedNull.java | 11 ++- .../core/model/implementation/Serializer.java | 66 +++++++++---- .../vlog4j/core/reasoner/KnowledgeBase.java | 97 +++++++++---------- .../vlog4j/core/reasoner/Reasoner.java | 2 +- .../MergingPrefixDeclarationRegistryTest.java | 12 +++ .../core/reasoner/KnowledgeBaseTest.java | 6 +- .../LocalPrefixDeclarationRegistry.java | 2 +- .../semanticweb/vlog4j/parser/RuleParser.java | 4 +- .../ImportFileDirectiveHandler.java | 5 +- .../ImportFileRelativeDirectiveHandler.java | 5 +- 17 files 
changed, 136 insertions(+), 112 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java index 78cb523c1..6424a028b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java @@ -21,7 +21,7 @@ */ public class PrefixDeclarationException extends VLog4jException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 787997047134745982L; public PrefixDeclarationException(String errorMessage) { super(errorMessage); diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java index c62b7ce1b..6952de6c3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java @@ -20,8 +20,6 @@ * #L% */ -import java.util.function.Function; - import org.semanticweb.vlog4j.core.model.implementation.Serializer; /** @@ -41,9 +39,4 @@ default TermType getType() { default String getSyntacticRepresentation() { return Serializer.getString(this); } - - @Override - default String getSyntacticRepresentation(Function iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java index ae4c1f6c4..874889211 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java @@ -89,7 +89,7 @@ public interface PrefixDeclarationRegistry extends Iterable iriTransformer) { - return getSyntacticRepresentation(); - } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java index 63b642a93..308843bdf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,6 +20,8 @@ * #L% */ +import java.util.function.Function; + /** * Interface for variables, i.e., terms of type * {@link TermType#UNIVERSAL_VARIABLE} and @@ -30,4 +32,8 @@ * @author Markus Krötzsch */ public interface Variable extends Term { + @Override + default String getSyntacticRepresentation(Function iriTransformer) { + return getSyntacticRepresentation(); + } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java index ac832aae5..bf1d14f8d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -125,17 +125,13 @@ public String unresolveAbsoluteIri(String iri) { * * @param other the set of prefix declarations to merge. Conflicting prefixes * from {@code other} will be renamed. 
- * - * @return this */ - public MergingPrefixDeclarationRegistry mergePrefixDeclarations(final PrefixDeclarationRegistry other) { + public void mergePrefixDeclarations(final PrefixDeclarationRegistry other) { this.setBaseIri(other.getBaseIri()); for (Entry prefix : other) { - setPrefixIri(prefix.getKey(), prefix.getValue()); + this.setPrefixIri(prefix.getKey(), prefix.getValue()); } - - return this; } private String getFreshPrefix() { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java index af1861859..ef05b14d2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java @@ -31,11 +31,12 @@ * @author Maximilian Marx */ public class RenamedNamedNull extends NamedNullImpl { - private RenamedNamedNull(String name) { - super(name); - } - + /** + * Construct a new renamed named null, with the given UUID as a name. + * + * @param name the name of the named null. + */ public RenamedNamedNull(UUID name) { - this(name.toString()); + super(name.toString()); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java index ea03e05c3..2d10dc4a5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java @@ -155,23 +155,25 @@ public static String getFactString(final Fact fact) { } /** - * Creates a String representation of a given {@link Constant}. + * Creates a String representation of a given {@link AbstractConstant}. * * @see Rule syntax . 
- * @param constant a {@link Constant} + * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. - * @return String representation corresponding to a given {@link Constant}. + * @return String representation corresponding to a given + * {@link AbstractConstant}. */ public static String getString(final AbstractConstant constant, Function iriTransformer) { return getIRIString(constant.getName(), iriTransformer); } /** - * Creates a String representation of a given {@link Constant}. + * Creates a String representation of a given {@link AbstractConstant}. * * @see Rule syntax . - * @param constant a {@link Constant} - * @return String representation corresponding to a given {@link Constant}. + * @param constant a {@link AbstractConstant} + * @return String representation corresponding to a given + * {@link AbstractConstant}. */ public static String getString(final AbstractConstant constant) { return getIRIString(constant.getName()); @@ -191,8 +193,20 @@ public static String getConstantName(final LanguageStringConstant languageString } /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} without an IRI. + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. + * + * examples: + *
            + *
          • {@code "string"^^xsd:String} results in {@code "string"},
          • + *
          • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
          • + *
          • {@code "42"^^xsd:Integer} results in {@code 42},
          • + *
          • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
          • + *
          • {@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.
          • + *
          * * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} @@ -207,16 +221,26 @@ public static String getString(final DatatypeConstant datatypeConstant, Function || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_INTEGER) || datatypeConstant.getDatatype().equals(PrefixDeclarationRegistry.XSD_DOUBLE)) { return datatypeConstant.getLexicalValue(); - } else { - return getConstantName(datatypeConstant, iriTransformer); } + + return getConstantName(datatypeConstant, iriTransformer); } /** - * Creates a String representation corresponding to the name of a given - * {@link DatatypeConstant} without an IRI. + * Creates a String representation corresponding to the given + * {@link DatatypeConstant}. For datatypes that have specialised lexical + * representations (i.e., xsd:String, xsd:Decimal, xsd:Integer, and xsd:Double), + * this representation is returned, otherwise the result is a generic literal + * with full datatype IRI. * - * @see Rule syntax . + * examples: + *
            + *
          • {@code "string"^^xsd:String} results in {@code "string"},
          • + *
          • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
          • + *
          • {@code "42"^^xsd:Integer} results in {@code 42},
          • + *
          • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
          • + *
          • {@code "test"^^} results in {@code "test"^^}.
          • + *
          * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -376,9 +400,9 @@ private static String getIRIString(final String string, Function if (string.contains(COLON) || string.matches(REGEX_INTEGER) || string.matches(REGEX_DOUBLE) || string.matches(REGEX_DECIMAL) || string.equals(REGEX_TRUE) || string.equals(REGEX_FALSE)) { return addAngleBrackets(string); - } else { - return string; } + + return string; } /** @@ -394,7 +418,7 @@ private static String getIRIString(final String string, Function *
        • {@code \n}
        • *
        • {@code \r}
        • *
        • {@code \f}
        • - *
            + *
          * Example for {@code string = "\\a"}, the returned value is * {@code string = "\"\\\\a\""} * @@ -416,7 +440,7 @@ public static String getString(final String string) { *
        • {@code \n}
        • *
        • {@code \r}
        • *
        • {@code \f}
        • - *
            + *
          * * @param string * @return an escaped string @@ -466,12 +490,12 @@ public static String getString(Predicate predicate, List terms, Function { private final Set listeners = new HashSet<>(); /** - * all (canonical) file paths imported so far, used to prevent cyclic imports. + * All (canonical) file paths imported so far, used to prevent cyclic imports. */ private final Set importedFilePaths = new HashSet<>(); @@ -176,7 +176,7 @@ public Void visit(final DataSourceDeclaration statement) { * base. We try to preserve user-provided prefixes found in files when loading * data. */ - private MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + private MergingPrefixDeclarationRegistry prefixDeclarationRegistry = new MergingPrefixDeclarationRegistry(); /** * Index structure that organises all facts by their predicate. @@ -205,7 +205,6 @@ public void addListener(final KnowledgeBaseListener listener) { */ public void deleteListener(final KnowledgeBaseListener listener) { this.listeners.remove(listener); - } /** @@ -457,7 +456,7 @@ Map> getFactsByPredicate() { */ @FunctionalInterface public interface AdditionalInputParser { - KnowledgeBase parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; + void parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; } /** @@ -471,10 +470,8 @@ public interface AdditionalInputParser { * @throws IllegalArgumentException when {@code file} is null or has already * been imported * @throws VLog4jException when parseFunction throws VLog4jException - * - * @return this */ - public KnowledgeBase importRulesFile(File file, AdditionalInputParser parseFunction) + public void importRulesFile(File file, AdditionalInputParser parseFunction) throws VLog4jException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); @@ -482,41 +479,40 @@ public KnowledgeBase importRulesFile(File file, AdditionalInputParser 
parseFunct Validate.isTrue(isNewFile, "file \"" + file.getName() + "\" was already imported."); try (InputStream stream = new FileInputStream(file)) { - return parseFunction.parseInto(stream, this); + parseFunction.parseInto(stream, this); } } /** * Merge {@link PrefixDeclarationRegistry} into this knowledge base. * - * @param prefixDeclarations the prefix declarations to merge. Conflicting - * prefix names in {@code prefixDeclarations} will be - * renamed. - * - * @return this + * @param prefixDeclarationRegistry the prefix declarations to merge. + * Conflicting prefix names in + * {@code prefixDeclarationRegistry} will be + * renamed to some implementation-specific, + * fresh prefix name. */ - public KnowledgeBase mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarations) { - this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); - - return this; + public void mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.prefixDeclarationRegistry.mergePrefixDeclarations(prefixDeclarationRegistry); } /** * Return the base IRI. * - * @return the base IRI, if declared, or {@code ""} otherwise. + * @return the base IRI, if declared, or + * {@link PrefixDeclarationRegistry#EMPTY_BASE} otherwise. */ - public String getBase() { - return this.prefixDeclarations.getBaseIri(); + public String getBaseIri() { + return this.prefixDeclarationRegistry.getBaseIri(); } - /* + /** * Return the declared prefixes. * * @return an iterator over all known prefixes. */ public Iterator> getPrefixes() { - return this.prefixDeclarations.iterator(); + return this.prefixDeclarationRegistry.iterator(); } /** @@ -529,12 +525,13 @@ public Iterator> getPrefixes() { * * @return the declared IRI for {@code prefixName}. 
*/ - public String getPrefix(String prefixName) throws PrefixDeclarationException { - return this.prefixDeclarations.getPrefixIri(prefixName); + public String getPrefixIri(String prefixName) throws PrefixDeclarationException { + return this.prefixDeclarationRegistry.getPrefixIri(prefixName); } - /* - * Resolve a prefixed name into an absolute IRI. Dual to unresolveAbsoluteIri. + /** + * Resolve a prefixed name into an absolute IRI. Dual to + * {@link unresolveAbsoluteIri}. * * @param prefixedName the prefixed name to resolve. * @@ -543,12 +540,12 @@ public String getPrefix(String prefixName) throws PrefixDeclarationException { * @return an absolute IRI corresponding to the prefixed name. */ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { - return this.prefixDeclarations.resolvePrefixedName(prefixedName); + return this.prefixDeclarationRegistry.resolvePrefixedName(prefixedName); } /** * Potentially abbreviate an absolute IRI using the declared prefixes. Dual to - * resolvePrefixedName. + * {@link resolvePrefixedName}. * * @param iri the absolute IRI to abbreviate. * @@ -556,6 +553,6 @@ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationE * declared prefixes, or {@code iri} if no suitable prefix is declared. 
*/ public String unresolveAbsoluteIri(String iri) { - return this.prefixDeclarations.unresolveAbsoluteIri(iri); + return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri); } } diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java index 968c53b51..444d4c615 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java @@ -93,7 +93,7 @@ static Reasoner getInstance() { * Exports all the (explicit and implicit) facts inferred during reasoning of * the knowledge base to an OutputStream. * - * @param an OutputStream for the facts to be written to. + * @param stream an OutputStream for the facts to be written to. * @return the correctness of the query answers, depending on the state of the * reasoning (materialisation) and its {@link KnowledgeBase}. * @throws IOException diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java index de313a6a1..5aad3d706 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java @@ -25,6 +25,7 @@ import org.junit.Before; import org.junit.Test; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; public class MergingPrefixDeclarationRegistryTest { @@ -118,6 +119,17 @@ public void mergingPrefixDeclarationRegistry_constructor_succeeds() throws Prefi assertEquals(MORE_SPECIFIC, 
prefixDeclarations.getPrefixIri("eg:")); } + @Test + public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPrefixName() + throws PrefixDeclarationException { + this.prefixDeclarations.setPrefixIri("eg:", BASE); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + assertEquals(BASE, this.prefixDeclarations.getPrefixIri("eg:")); + assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("vlog4j_generated_0:")); + } + @Test public void unresolveAbsoluteIri_default_identical() { assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE)); diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java index ce862f09b..8e0531a67 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java @@ -94,12 +94,12 @@ public void testDoRemoveStatementInexistentPredicate() { @Test public void getBase_default_hasEmptyBase() { - assertEquals("", this.kb.getBase()); + assertEquals("", this.kb.getBaseIri()); } @Test(expected = PrefixDeclarationException.class) public void getPrefix_defaultUndeclaredPrefix_throws() throws PrefixDeclarationException { - this.kb.getPrefix("ex:"); + this.kb.getPrefixIri("ex:"); } @Test(expected = PrefixDeclarationException.class) @@ -113,7 +113,7 @@ public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationExc MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); prefixDeclarations.setPrefixIri("ex:", iri); this.kb.mergePrefixDeclarations(prefixDeclarations); - assertEquals(this.kb.getPrefix("ex:"), iri); + assertEquals(this.kb.getPrefixIri("ex:"), 
iri); assertEquals(this.kb.resolvePrefixedName("ex:test"), iri + "test"); assertEquals(this.kb.unresolveAbsoluteIri(iri + "test"), "ex:test"); } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java index 42e014cae..e5e3ddba3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java @@ -80,7 +80,7 @@ public void setPrefixIri(String prefixName, String prefixIri) throws PrefixDecla * Sets the base namespace to the given value. This should only be done once, * and not after the base namespace was assumed to be an implicit default value. * - * @param baseIri the new base namespace + * @param baseUri the new base namespace * @throws PrefixDeclarationException if base was already defined */ diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java index 2fab78fd9..85152fc8a 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/RuleParser.java @@ -261,7 +261,9 @@ static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException } KnowledgeBase knowledgeBase = parser.getKnowledgeBase(); - return knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarationRegistry()); + knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarationRegistry()); + + return knowledgeBase; } protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser) diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java index af494d37c..f578a7c7b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileDirectiveHandler.java @@ -48,12 +48,13 @@ public KnowledgeBase handleDirective(List arguments, final Su ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); try { - return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration); - return kb; }); } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e); } + + return knowledgeBase; } } diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java index 2ac3216d1..ca6610a18 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -49,12 +49,13 @@ public KnowledgeBase handleDirective(List arguments, SubParse ParserConfiguration parserConfiguration = getParserConfiguration(subParserFactory); try { - return knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { + knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> { RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarationRegistry.getBaseIri()); - return kb; }); } catch (Exception e) { throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", 
e); } + + return knowledgeBase; } } From 6b2237713f60405efdaa3bf0ae55189a391f8eeb Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Fri, 28 Feb 2020 20:56:00 +0100 Subject: [PATCH 0815/1255] Core: Drop unused imports --- .../vlog4j/core/reasoner/implementation/VLogReasoner.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java index ebbf50ad1..cdec467b5 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java @@ -1,7 +1,5 @@ package org.semanticweb.vlog4j.core.reasoner.implementation; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; @@ -10,7 +8,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Function; import org.apache.commons.lang3.Validate; import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; From 940c3a4392c77117d62cbc1e306cd7a00b53d611 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 13:09:50 +0100 Subject: [PATCH 0816/1255] Update release notes --- RELEASE-NOTES.md | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 5a1e07412..6fb2310fa 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -6,11 +6,25 @@ VLog4j v0.6.0 Breaking changes: * In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no - longer exist. It can be replaced by + longer exist. 
It can be replaced by `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` +* All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* Rules files may import other rules files using `@import` and + `@import-relative`, where the latter resolves relative IRIs using + the current base IRI, unless the imported file explicitly specifies + a different one. +* Named nulls of the form `_:name` are now allowed during parsing (but + may not occur in rule bodies). They are renamed to assure that they + are distinct on a per-file level. +* The parser allows custom directives to be implemented, and a certain + set of delimiters allows for custom literal expressions. + +Other improvements: +* Prefix declarations are now kept as part of the Knowledge Base and + are used to abbreviate names when exporting inferences. VLog4j v0.5.0 From 70d53373255d8da8fc24e61a4ea8f4372423343a Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 14:04:26 +0100 Subject: [PATCH 0817/1255] Core: Move skolemization into core --- .../implementation/Skolemization.java | 61 +++++++++++++++ .../implementation/SkolemizationTest.java | 76 +++++++++++++++++++ .../parser/javacc/JavaCCParserBase.java | 23 ++---- .../parser/javacc/SubParserFactory.java | 7 +- 4 files changed, 149 insertions(+), 18 deletions(-) create mode 100644 vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java create mode 100644 vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java new file mode 100644 index 000000000..2a6269ebb --- /dev/null +++ 
b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java @@ -0,0 +1,61 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.UUID; + +import org.semanticweb.vlog4j.core.model.api.NamedNull; +import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; + +/** + * A class that implements skolemization of named null names. The same + * name should be skolemized to the same {@link NamedNull} when + * skolemized using the same instance, but to different instances of + * {@link NamedNull} when skolemized using different instances of + * {@link Skolemization}. + * + * @author Maximilian Marx + */ +public class Skolemization { + /** + * The namespace to use for skolemizing named null names. + */ + private final byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); + + /** + * Skolemize a named null name. The same {@code name} will map to + * a {@link RenamedNamedNull} instance with the same name when + * called on the same instance. + * + * @throws IOException when ByteArrayOutputStream throws. + * @return a {@link RenamedNamedNull} instance with a new name + * that is specific to this instance and {@code name}. 
+ */ + public RenamedNamedNull skolemizeNamedNull(String name) throws IOException { + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + stream.write(namedNullNamespace); + stream.write(name.getBytes()); + + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + } +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java new file mode 100644 index 000000000..bebe220d2 --- /dev/null +++ b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java @@ -0,0 +1,76 @@ +package org.semanticweb.vlog4j.core.reasoner.implementation; + +/*- + * #%L + * vlog4j-parser + * %% + * Copyright (C) 2018 - 2019 VLog4j Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.vlog4j.core.model.api.NamedNull; + +public class SkolemizationTest { + private Skolemization skolemization; + private final static String name1 = "_:1"; + private final static String name2 = "_:2"; + + @Before + public void init() { + this.skolemization = new Skolemization(); + } + + @Test + public void skolemizeNamedNull_sameName_mapsToSameNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + NamedNull null2 = skolemization.skolemizeNamedNull(name1); + + assertEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentName_mapsToDifferentNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + NamedNull null2 = skolemization.skolemizeNamedNull(name2); + + assertNotEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentInstances_mapsToDifferentNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + Skolemization other = new Skolemization(); + NamedNull null2 = other.skolemizeNamedNull(name1); + + assertNotEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentNamedNull() throws IOException { + NamedNull null1 = skolemization.skolemizeNamedNull(name1); + Skolemization other = new Skolemization(); + NamedNull null2 = other.skolemizeNamedNull(name2); + + assertNotEquals(null1.getName(), null2.getName()); + assertEquals(null1.getName(), skolemization.skolemizeNamedNull(name1).getName()); + assertEquals(null2.getName(), other.skolemizeNamedNull(name2).getName()); + } +} diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java 
b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java index ac977898e..c284342d3 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/JavaCCParserBase.java @@ -1,8 +1,5 @@ package org.semanticweb.vlog4j.parser.javacc; -import java.io.ByteArrayOutputStream; -import java.io.IOException; - /*- * #%L * vlog4j-parser @@ -23,9 +20,9 @@ * #L% */ +import java.io.IOException; import java.util.HashSet; import java.util.List; -import java.util.UUID; import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; import org.semanticweb.vlog4j.core.model.api.AbstractConstant; @@ -38,8 +35,8 @@ import org.semanticweb.vlog4j.core.model.api.Term; import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.Skolemization; import org.semanticweb.vlog4j.parser.DefaultParserConfiguration; import org.semanticweb.vlog4j.parser.DirectiveArgument; import org.semanticweb.vlog4j.parser.LocalPrefixDeclarationRegistry; @@ -66,7 +63,7 @@ public class JavaCCParserBase { private KnowledgeBase knowledgeBase; private ParserConfiguration parserConfiguration; - private byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); + private Skolemization skolemization = new Skolemization(); /** * "Local" variable to remember (universal) body variables during parsing. 
@@ -165,15 +162,11 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } NamedNull createNamedNull(String lexicalForm) throws ParseException { - ByteArrayOutputStream stream = new ByteArrayOutputStream(); try { - stream.write(namedNullNamespace); - stream.write(lexicalForm.getBytes()); + return this.skolemization.skolemizeNamedNull(lexicalForm); } catch (IOException e) { throw makeParseExceptionWithCause("Failed to generate a unique name for named null", e); } - - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); } void addStatement(Statement statement) { @@ -326,12 +319,12 @@ public ParserConfiguration getParserConfiguration() { return parserConfiguration; } - byte[] getNamedNullNamespace() { - return namedNullNamespace; + Skolemization getSkolemization() { + return skolemization; } - void setNamedNullNamespace(byte[] namedNullNamespace) { - this.namedNullNamespace = namedNullNamespace; + void setSkolemization(Skolemization skolemization) { + this.skolemization = skolemization; } public void setPrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) { diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java index 2332afa83..ba1f38155 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java +++ b/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/javacc/SubParserFactory.java @@ -26,6 +26,7 @@ import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.vlog4j.core.reasoner.implementation.Skolemization; import org.semanticweb.vlog4j.parser.ParserConfiguration; import org.semanticweb.vlog4j.parser.RuleParser; @@ -41,7 +42,7 @@ public class SubParserFactory { private final KnowledgeBase knowledgeBase; private final 
ParserConfiguration parserConfiguration; private final PrefixDeclarationRegistry prefixDeclarationRegistry; - private final byte[] namedNullNamespace; + private final Skolemization skolemization; /** * Construct a SubParserFactory. @@ -52,7 +53,7 @@ public class SubParserFactory { this.knowledgeBase = parser.getKnowledgeBase(); this.prefixDeclarationRegistry = parser.getPrefixDeclarationRegistry(); this.parserConfiguration = parser.getParserConfiguration(); - this.namedNullNamespace = parser.getNamedNullNamespace(); + this.skolemization = parser.getSkolemization(); } /** @@ -69,7 +70,7 @@ public JavaCCParser makeSubParser(final InputStream inputStream, final String en subParser.setKnowledgeBase(this.knowledgeBase); subParser.setPrefixDeclarationRegistry(this.prefixDeclarationRegistry); subParser.setParserConfiguration(this.parserConfiguration); - subParser.setNamedNullNamespace(this.namedNullNamespace); + subParser.setSkolemization(this.skolemization); return subParser; } From ede5576e814223f504400f5391b32e94f97eac5e Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 14:43:19 +0100 Subject: [PATCH 0818/1255] Rename all files, directories, and artifacts --- .gitignore | 10 +- LICENSE.txt | 402 +++++++++--------- README.md | 40 +- RELEASE-NOTES.md | 5 +- build-vlog-library.sh | 6 +- coverage/pom.xml | 28 +- pom.xml | 53 +-- .../LICENSE.txt | 0 {vlog4j-client => rulewerk-client}/pom.xml | 20 +- .../rulewerk}/client/picocli/ClientUtils.java | 10 +- .../client/picocli/PrintQueryResults.java | 2 +- .../rulewerk}/client/picocli/SaveModel.java | 2 +- .../client/picocli/SaveQueryResults.java | 2 +- .../client/picocli/VLog4jClient.java | 2 +- .../picocli/VLog4jClientMaterialize.java | 18 +- .../client/picocli/PrintQueryResultsTest.java | 6 +- .../client/picocli/SaveModelTest.java | 2 +- .../client/picocli/SaveQueryResultsTest.java | 2 +- {vlog4j-core => rulewerk-core}/LICENSE.txt | 0 {vlog4j-core => rulewerk-core}/pom.xml | 130 +++--- 
.../IncompatiblePredicateArityException.java | 6 +- .../PrefixDeclarationException.java | 2 +- .../exceptions/ReasonerStateException.java | 4 +- .../core/exceptions/VLog4jException.java | 2 +- .../exceptions/VLog4jRuntimeException.java | 2 +- .../core/model/api/AbstractConstant.java | 4 +- .../rulewerk}/core/model/api/Conjunction.java | 4 +- .../rulewerk}/core/model/api/Constant.java | 2 +- .../rulewerk}/core/model/api/DataSource.java | 2 +- .../core/model/api/DataSourceDeclaration.java | 4 +- .../core/model/api/DatatypeConstant.java | 4 +- .../rulewerk}/core/model/api/Entity.java | 2 +- .../core/model/api/ExistentialVariable.java | 4 +- .../rulewerk}/core/model/api/Fact.java | 4 +- .../model/api/LanguageStringConstant.java | 4 +- .../rulewerk}/core/model/api/Literal.java | 4 +- .../rulewerk}/core/model/api/NamedNull.java | 4 +- .../core/model/api/NegativeLiteral.java | 2 +- .../core/model/api/PositiveLiteral.java | 2 +- .../rulewerk}/core/model/api/Predicate.java | 4 +- .../model/api/PrefixDeclarationRegistry.java | 4 +- .../rulewerk}/core/model/api/QueryResult.java | 2 +- .../rulewerk}/core/model/api/Rule.java | 4 +- .../rulewerk}/core/model/api/Statement.java | 2 +- .../core/model/api/StatementVisitor.java | 2 +- .../core/model/api/SyntaxObject.java | 2 +- .../rulewerk}/core/model/api/Term.java | 2 +- .../rulewerk}/core/model/api/TermType.java | 2 +- .../rulewerk}/core/model/api/TermVisitor.java | 2 +- .../rulewerk}/core/model/api/Terms.java | 2 +- .../core/model/api/UniversalVariable.java | 4 +- .../rulewerk}/core/model/api/Variable.java | 2 +- .../implementation/AbstractConstantImpl.java | 6 +- .../implementation/AbstractLiteralImpl.java | 8 +- .../AbstractPrefixDeclarationRegistry.java | 6 +- .../implementation/AbstractTermImpl.java | 4 +- .../model/implementation/ConjunctionImpl.java | 8 +- .../DataSourceDeclarationImpl.java | 10 +- .../implementation/DatatypeConstantImpl.java | 6 +- .../ExistentialVariableImpl.java | 6 +- 
.../model/implementation/Expressions.java | 28 +- .../core/model/implementation/FactImpl.java | 10 +- .../LanguageStringConstantImpl.java | 6 +- .../MergingPrefixDeclarationRegistry.java | 4 +- .../model/implementation/NamedNullImpl.java | 6 +- .../implementation/NegativeLiteralImpl.java | 8 +- .../implementation/PositiveLiteralImpl.java | 8 +- .../model/implementation/PredicateImpl.java | 4 +- .../implementation/RenamedNamedNull.java | 6 +- .../core/model/implementation/RuleImpl.java | 16 +- .../core/model/implementation/Serializer.java | 44 +- .../implementation/UniversalVariableImpl.java | 6 +- .../core/reasoner/AcyclicityNotion.java | 2 +- .../rulewerk}/core/reasoner/Algorithm.java | 2 +- .../rulewerk}/core/reasoner/Correctness.java | 2 +- .../core/reasoner/CyclicityResult.java | 2 +- .../core/reasoner/KnowledgeBase.java | 28 +- .../core/reasoner/KnowledgeBaseListener.java | 4 +- .../rulewerk}/core/reasoner/LogLevel.java | 2 +- .../core/reasoner/QueryAnswerCount.java | 2 +- .../core/reasoner/QueryResultIterator.java | 4 +- .../rulewerk}/core/reasoner/Reasoner.java | 22 +- .../core/reasoner/ReasonerState.java | 2 +- .../core/reasoner/RuleRewriteStrategy.java | 2 +- .../implementation/CsvFileDataSource.java | 4 +- .../EmptyQueryResultIterator.java | 8 +- .../implementation/FileDataSource.java | 2 +- .../implementation/InMemoryDataSource.java | 6 +- .../implementation/ModelToVLogConverter.java | 22 +- .../implementation/QueryAnswerCountImpl.java | 6 +- .../implementation/QueryResultImpl.java | 6 +- .../implementation/RdfFileDataSource.java | 4 +- .../implementation/Skolemization.java | 6 +- .../SparqlQueryResultDataSource.java | 6 +- .../implementation/TermToVLogConverter.java | 20 +- .../implementation/VLogDataSource.java | 4 +- .../implementation/VLogKnowledgeBase.java | 34 +- .../VLogQueryResultIterator.java | 8 +- .../reasoner/implementation/VLogReasoner.java | 50 +-- .../implementation/VLogToModelConverter.java | 20 +- .../src/test/data/input/binaryFacts.csv 
| 0 .../src/test/data/input/constantD.csv | 0 .../src/test/data/input/empty.csv | 0 .../test/data/input/invalidFormatNtFacts.nt | 0 .../src/test/data/input/ternaryFacts.nt | 0 .../test/data/input/ternaryFactsZipped.nt.gz | Bin .../src/test/data/input/unaryFacts.csv | 0 .../src/test/data/input/unaryFactsCD.csv | 0 .../test/data/input/unaryFactsZipped.csv.gz | Bin .../src/test/data/output/.keep | 0 .../core/model/ConjunctionImplTest.java | 18 +- .../core/model/DataSourceDeclarationTest.java | 20 +- .../rulewerk}/core/model/FactTest.java | 14 +- .../MergingPrefixDeclarationRegistryTest.java | 8 +- .../core/model/NegativeLiteralImplTest.java | 20 +- .../core/model/PositiveLiteralImplTest.java | 20 +- .../core/model/PredicateImplTest.java | 8 +- .../rulewerk}/core/model/RuleImplTest.java | 24 +- .../rulewerk}/core/model/TermImplTest.java | 24 +- .../core/reasoner/KnowledgeBaseTest.java | 10 +- .../rulewerk}/core/reasoner/LoggingTest.java | 16 +- .../core/reasoner/ReasonerTimeoutTest.java | 16 +- .../implementation/AddDataSourceTest.java | 22 +- .../implementation/AnswerQueryTest.java | 28 +- .../implementation/CsvFileDataSourceTest.java | 4 +- .../FileDataSourceTestUtils.java | 16 +- .../GeneratedAnonymousIndividualsTest.java | 22 +- .../ModelToVLogConverterTest.java | 24 +- .../implementation/QueryAnswerCountTest.java | 22 +- .../QueryAnsweringCorrectnessTest.java | 30 +- .../implementation/QueryResultImplTest.java | 12 +- .../implementation/QueryResultsUtils.java | 8 +- .../implementation/RdfFileDataSourceTest.java | 4 +- .../implementation/SkolemizationTest.java | 4 +- .../SparqlQueryResultDataSourceTest.java | 6 +- .../implementation/VLogReasonerBasics.java | 22 +- .../VLogReasonerCombinedInputs.java | 24 +- .../implementation/VLogReasonerCsvInput.java | 24 +- .../implementation/VLogReasonerCsvOutput.java | 14 +- .../implementation/VLogReasonerNegation.java | 22 +- .../implementation/VLogReasonerRdfInput.java | 16 +- .../VLogReasonerSparqlInput.java | 18 +- 
.../implementation/VLogReasonerStateTest.java | 34 +- .../VLogReasonerWriteInferencesTest.java | 30 +- .../VLogToModelConverterTest.java | 14 +- .../vlog/ExportQueryResultToCsvFileTest.java | 4 +- .../core/reasoner/vlog/LargeAritiesTest.java | 2 +- .../reasoner/vlog/StratifiedNegationTest.java | 4 +- .../vlog/VLogDataFromCsvFileTest.java | 4 +- .../reasoner/vlog/VLogDataFromMemoryTest.java | 2 +- .../vlog/VLogDataFromRdfFileTest.java | 4 +- .../core/reasoner/vlog/VLogExpressions.java | 2 +- .../reasoner/vlog/VLogQueryResultUtils.java | 2 +- .../core/reasoner/vlog/VLogQueryTest.java | 2 +- .../core/reasoner/vlog/VLogTermNamesTest.java | 2 +- .../LICENSE.txt | 0 .../README.md | 0 .../pom.xml | 179 ++++---- .../src/main/data/.gitignore | 0 .../src/main/data/input/bicycleEDB.csv.gz | Bin .../main/data/input/counting-triangles.rls | 0 .../src/main/data/input/doid.nt.gz | Bin .../src/main/data/input/doid.rls | 0 .../main/data/input/graal/doid-example.dlgp | 0 .../src/main/data/input/graal/example.dlgp | 0 .../src/main/data/input/hasPartEDB.csv.gz | Bin .../src/main/data/input/owl/bike.owl | 0 .../rdf/iswc-2016-complete-alignments.rdf | 0 .../main/data/input/ternaryBicycleEDB.nt.gz | Bin .../src/main/data/input/wheelEDB.csv.gz | Bin .../src/main/data/output/.keep | 0 .../examples/CompareWikidataDBpedia.java | 12 +- .../rulewerk}/examples/CountingTriangles.java | 10 +- .../rulewerk}/examples/DoidExample.java | 14 +- .../rulewerk}/examples/ExamplesUtils.java | 22 +- .../InMemoryGraphAnalysisExample.java | 18 +- .../examples/SimpleReasoningExample.java | 12 +- .../examples/core/AddDataFromCsvFile.java | 18 +- .../examples/core/AddDataFromRdfFile.java | 20 +- .../core/AddDataFromSparqlQueryResults.java | 30 +- .../core/ConfigureReasonerLogging.java | 12 +- .../SkolemVsRestrictedChaseTermination.java | 16 +- .../examples/graal/AddDataFromDlgpFile.java | 12 +- .../examples/graal/AddDataFromGraal.java | 16 +- .../examples/graal/DoidExampleGraal.java | 32 +- 
.../owlapi/OwlOntologyToRulesAndFacts.java | 26 +- .../examples/rdf/AddDataFromRdfModel.java | 28 +- .../src/main/logs/.keep | 0 {vlog4j-graal => rulewerk-graal}/LICENSE.txt | 0 {vlog4j-graal => rulewerk-graal}/pom.xml | 10 +- .../graal/GraalConjunctiveQueryToRule.java | 14 +- .../graal/GraalConvertException.java | 2 +- .../graal/GraalToVLog4JModelConverter.java | 16 +- .../GraalToVLog4JModelConverterTest.java | 16 +- .../LICENSE.txt | 0 {vlog4j-owlapi => rulewerk-owlapi}/pom.xml | 72 ++-- .../owlapi/AbstractClassToRuleConverter.java | 12 +- .../owlapi/ClassToRuleBodyConverter.java | 10 +- .../owlapi/ClassToRuleHeadConverter.java | 8 +- .../owlapi/OwlAxiomToRulesConverter.java | 32 +- .../OwlFeatureNotSupportedException.java | 2 +- .../owlapi/OwlToRulesConversionHelper.java | 22 +- .../rulewerk}/owlapi/OwlToRulesConverter.java | 6 +- .../owlapi/OwlAxiomToRulesConverterTest.java | 16 +- .../LICENSE.txt | 0 {vlog4j-parser => rulewerk-parser}/pom.xml | 12 +- .../parser/ConfigurableLiteralHandler.java | 6 +- .../parser/DataSourceDeclarationHandler.java | 4 +- .../parser/DatatypeConstantHandler.java | 4 +- .../parser/DefaultParserConfiguration.java | 14 +- .../rulewerk}/parser/DirectiveArgument.java | 4 +- .../rulewerk}/parser/DirectiveHandler.java | 12 +- .../LocalPrefixDeclarationRegistry.java | 8 +- .../rulewerk}/parser/ParserConfiguration.java | 30 +- .../rulewerk}/parser/ParsingException.java | 4 +- .../rulewerk}/parser/RuleParser.java | 30 +- .../CsvFileDataSourceDeclarationHandler.java | 16 +- .../RdfFileDataSourceDeclarationHandler.java | 16 +- ...eryResultDataSourceDeclarationHandler.java | 16 +- .../ImportFileDirectiveHandler.java | 16 +- .../ImportFileRelativeDirectiveHandler.java | 18 +- .../rulewerk}/parser/javacc/.gitignore | 0 .../rulewerk}/parser/javacc/JavaCCParser.jj | 44 +- .../parser/javacc/JavaCCParserBase.java | 38 +- .../parser/javacc/SubParserFactory.java | 12 +- .../parser/DirectiveArgumentTest.java | 6 +- .../parser/DirectiveHandlerTest.java | 
6 +- .../rulewerk}/parser/EntityTest.java | 30 +- .../parser/ParserConfigurationTest.java | 14 +- .../rulewerk}/parser/ParserTestUtils.java | 10 +- .../RuleParserConfigurableLiteralTest.java | 20 +- .../parser/RuleParserDataSourceTest.java | 32 +- .../parser/RuleParserParseFactTest.java | 24 +- .../rulewerk}/parser/RuleParserTest.java | 32 +- .../parser/javacc/JavaCCParserBaseTest.java | 12 +- .../src/test/resources/base.rls | 0 .../src/test/resources/blank.rls | 0 .../src/test/resources/facts.rls | 0 {vlog4j-rdf => rulewerk-rdf}/LICENSE.txt | 0 {vlog4j-rdf => rulewerk-rdf}/pom.xml | 10 +- .../rulewerk}/rdf/RdfModelConverter.java | 16 +- .../rdf/RdfValueToTermConverter.java | 14 +- .../src/test/data/input/collections.ttl | 0 .../src/test/data/input/escapedCharacters.ttl | 0 .../src/test/data/input/exampleFacts.ttl | 0 .../src/test/data/input/labelledBNodes.ttl | 0 .../src/test/data/input/languageTags.ttl | 0 .../src/test/data/input/literalValues.ttl | 0 .../src/test/data/input/relativeURIs.ttl | 0 .../src/test/data/input/unlabelledBNodes.ttl | 0 .../data/input/unnormalizedLiteralValues.ttl | 0 .../src/test/data/output/.keep | 0 .../rulewerk}/rdf/RdfTestUtils.java | 12 +- .../rdf/TestConvertRdfFileToFacts.java | 22 +- .../rulewerk}/rdf/TestReasonOverRdfFacts.java | 22 +- 255 files changed, 1634 insertions(+), 1629 deletions(-) rename {vlog4j-client => rulewerk-client}/LICENSE.txt (100%) rename {vlog4j-client => rulewerk-client}/pom.xml (82%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/ClientUtils.java (92%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/PrintQueryResults.java (98%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/SaveModel.java (98%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => 
rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/SaveQueryResults.java (98%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/VLog4jClient.java (96%) rename {vlog4j-client/src/main/java/org/semanticweb/vlog4j => rulewerk-client/src/main/java/org/semanticweb/rulewerk}/client/picocli/VLog4jClientMaterialize.java (93%) rename {vlog4j-client/src/test/java/org/semanticweb/vlog4j => rulewerk-client/src/test/java/org/semanticweb/rulewerk}/client/picocli/PrintQueryResultsTest.java (96%) rename {vlog4j-client/src/test/java/org/semanticweb/vlog4j => rulewerk-client/src/test/java/org/semanticweb/rulewerk}/client/picocli/SaveModelTest.java (99%) rename {vlog4j-client/src/test/java/org/semanticweb/vlog4j => rulewerk-client/src/test/java/org/semanticweb/rulewerk}/client/picocli/SaveQueryResultsTest.java (99%) rename {vlog4j-core => rulewerk-core}/LICENSE.txt (100%) rename {vlog4j-core => rulewerk-core}/pom.xml (79%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/IncompatiblePredicateArityException.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/PrefixDeclarationException.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/ReasonerStateException.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/VLog4jException.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/exceptions/VLog4jRuntimeException.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => 
rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/AbstractConstant.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Conjunction.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Constant.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/DataSource.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/DataSourceDeclaration.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/DatatypeConstant.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Entity.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/ExistentialVariable.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Fact.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/LanguageStringConstant.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Literal.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/NamedNull.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/NegativeLiteral.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => 
rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/PositiveLiteral.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Predicate.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/PrefixDeclarationRegistry.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/QueryResult.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Rule.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Statement.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/StatementVisitor.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/SyntaxObject.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Term.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/TermType.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/TermVisitor.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Terms.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/UniversalVariable.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => 
rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/api/Variable.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractConstantImpl.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractLiteralImpl.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractPrefixDeclarationRegistry.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/AbstractTermImpl.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/ConjunctionImpl.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/DataSourceDeclarationImpl.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/DatatypeConstantImpl.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/ExistentialVariableImpl.java (86%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/Expressions.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/FactImpl.java (81%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/LanguageStringConstantImpl.java (91%) rename 
{vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/MergingPrefixDeclarationRegistry.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/NamedNullImpl.java (88%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/NegativeLiteralImpl.java (80%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/PositiveLiteralImpl.java (80%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/PredicateImpl.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/RenamedNamedNull.java (84%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/RuleImpl.java (88%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/Serializer.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/model/implementation/UniversalVariableImpl.java (86%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/AcyclicityNotion.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/Algorithm.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/Correctness.java (96%) rename 
{vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/CyclicityResult.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/KnowledgeBase.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/KnowledgeBaseListener.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/LogLevel.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/QueryAnswerCount.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/QueryResultIterator.java (92%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/Reasoner.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/ReasonerState.java (97%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/RuleRewriteStrategy.java (91%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/CsvFileDataSource.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/EmptyQueryResultIterator.java (82%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/FileDataSource.java (98%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => 
rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/InMemoryDataSource.java (95%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/ModelToVLogConverter.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryAnswerCountImpl.java (88%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryResultImpl.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/RdfFileDataSource.java (94%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/Skolemization.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/SparqlQueryResultDataSource.java (96%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/TermToVLogConverter.java (84%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogDataSource.java (90%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogKnowledgeBase.java (89%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogQueryResultIterator.java (87%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => 
rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasoner.java (93%) rename {vlog4j-core/src/main/java/org/semanticweb/vlog4j => rulewerk-core/src/main/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogToModelConverter.java (86%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/binaryFacts.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/constantD.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/empty.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/invalidFormatNtFacts.nt (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/ternaryFacts.nt (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/ternaryFactsZipped.nt.gz (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/unaryFacts.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/unaryFactsCD.csv (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/input/unaryFactsZipped.csv.gz (100%) rename {vlog4j-core => rulewerk-core}/src/test/data/output/.keep (100%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/ConjunctionImplTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/DataSourceDeclarationTest.java (88%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/FactTest.java (81%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/MergingPrefixDeclarationRegistryTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/NegativeLiteralImplTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => 
rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/PositiveLiteralImplTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/PredicateImplTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/RuleImplTest.java (90%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/model/TermImplTest.java (87%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/KnowledgeBaseTest.java (92%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/LoggingTest.java (92%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/ReasonerTimeoutTest.java (91%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/AddDataSourceTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/AnswerQueryTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/CsvFileDataSourceTest.java (95%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/FileDataSourceTestUtils.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java (89%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => 
rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/ModelToVLogConverterTest.java (91%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryAnswerCountTest.java (95%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryResultImplTest.java (80%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/QueryResultsUtils.java (88%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/RdfFileDataSourceTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/SkolemizationTest.java (95%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerBasics.java (80%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerCombinedInputs.java (88%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerCsvInput.java (85%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => 
rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerCsvOutput.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerNegation.java (83%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerRdfInput.java (90%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerSparqlInput.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerStateTest.java (93%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java (85%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/implementation/VLogToModelConverterTest.java (87%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java (94%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/LargeAritiesTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/StratifiedNegationTest.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogDataFromCsvFileTest.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => 
rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogDataFromMemoryTest.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogDataFromRdfFileTest.java (96%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogExpressions.java (98%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogQueryResultUtils.java (97%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogQueryTest.java (98%) rename {vlog4j-core/src/test/java/org/semanticweb/vlog4j => rulewerk-core/src/test/java/org/semanticweb/rulewerk}/core/reasoner/vlog/VLogTermNamesTest.java (98%) rename {vlog4j-examples => rulewerk-examples}/LICENSE.txt (100%) rename {vlog4j-examples => rulewerk-examples}/README.md (100%) rename {vlog4j-examples => rulewerk-examples}/pom.xml (79%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/.gitignore (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/bicycleEDB.csv.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/counting-triangles.rls (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/doid.nt.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/doid.rls (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/graal/doid-example.dlgp (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/graal/example.dlgp (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/hasPartEDB.csv.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/owl/bike.owl (100%) rename {vlog4j-examples => 
rulewerk-examples}/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/ternaryBicycleEDB.nt.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/input/wheelEDB.csv.gz (100%) rename {vlog4j-examples => rulewerk-examples}/src/main/data/output/.keep (100%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/CompareWikidataDBpedia.java (92%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/CountingTriangles.java (89%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/DoidExample.java (86%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/ExamplesUtils.java (87%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/InMemoryGraphAnalysisExample.java (85%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/SimpleReasoningExample.java (90%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/AddDataFromCsvFile.java (87%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/AddDataFromRdfFile.java (86%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/AddDataFromSparqlQueryResults.java (86%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/ConfigureReasonerLogging.java 
(92%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/core/SkolemVsRestrictedChaseTermination.java (88%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/graal/AddDataFromDlgpFile.java (93%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/graal/AddDataFromGraal.java (91%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/graal/DoidExampleGraal.java (84%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/owlapi/OwlOntologyToRulesAndFacts.java (84%) rename {vlog4j-examples/src/main/java/org/semanticweb/vlog4j => rulewerk-examples/src/main/java/org/semanticweb/rulewerk}/examples/rdf/AddDataFromRdfModel.java (87%) rename {vlog4j-examples => rulewerk-examples}/src/main/logs/.keep (100%) rename {vlog4j-graal => rulewerk-graal}/LICENSE.txt (100%) rename {vlog4j-graal => rulewerk-graal}/pom.xml (80%) rename {vlog4j-graal/src/main/java/org/semanticweb/vlog4j => rulewerk-graal/src/main/java/org/semanticweb/rulewerk}/graal/GraalConjunctiveQueryToRule.java (90%) rename {vlog4j-graal/src/main/java/org/semanticweb/vlog4j => rulewerk-graal/src/main/java/org/semanticweb/rulewerk}/graal/GraalConvertException.java (96%) rename {vlog4j-graal/src/main/java/org/semanticweb/vlog4j => rulewerk-graal/src/main/java/org/semanticweb/rulewerk}/graal/GraalToVLog4JModelConverter.java (96%) rename {vlog4j-graal/src/test/java/org/semanticweb/vlog4j => rulewerk-graal/src/test/java/org/semanticweb/rulewerk}/graal/GraalToVLog4JModelConverterTest.java (96%) rename {vlog4j-owlapi => rulewerk-owlapi}/LICENSE.txt (100%) rename {vlog4j-owlapi => rulewerk-owlapi}/pom.xml (79%) rename 
{vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/AbstractClassToRuleConverter.java (96%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/ClassToRuleBodyConverter.java (95%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/ClassToRuleHeadConverter.java (96%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlAxiomToRulesConverter.java (95%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlFeatureNotSupportedException.java (96%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlToRulesConversionHelper.java (90%) rename {vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk}/owlapi/OwlToRulesConverter.java (92%) rename {vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j => rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk}/owlapi/OwlAxiomToRulesConverterTest.java (98%) rename {vlog4j-parser => rulewerk-parser}/LICENSE.txt (100%) rename {vlog4j-parser => rulewerk-parser}/pom.xml (93%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/ConfigurableLiteralHandler.java (89%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DataSourceDeclarationHandler.java (89%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DatatypeConstantHandler.java (91%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => 
rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DefaultParserConfiguration.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DirectiveArgument.java (98%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/DirectiveHandler.java (95%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/LocalPrefixDeclarationRegistry.java (90%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/ParserConfiguration.java (90%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/ParsingException.java (91%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/RuleParser.java (91%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/datasources/CsvFileDataSourceDeclarationHandler.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/datasources/RdfFileDataSourceDeclarationHandler.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java (74%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/directives/ImportFileDirectiveHandler.java (78%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/directives/ImportFileRelativeDirectiveHandler.java (77%) rename 
{vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/.gitignore (100%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/JavaCCParser.jj (93%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/JavaCCParserBase.java (89%) rename {vlog4j-parser/src/main/java/org/semanticweb/vlog4j => rulewerk-parser/src/main/java/org/semanticweb/rulewerk}/parser/javacc/SubParserFactory.java (87%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/DirectiveArgumentTest.java (94%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/DirectiveHandlerTest.java (96%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/EntityTest.java (90%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/ParserConfigurationTest.java (90%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/ParserTestUtils.java (83%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserConfigurableLiteralTest.java (95%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserDataSourceTest.java (90%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserParseFactTest.java (80%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => 
rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/RuleParserTest.java (95%) rename {vlog4j-parser/src/test/java/org/semanticweb/vlog4j => rulewerk-parser/src/test/java/org/semanticweb/rulewerk}/parser/javacc/JavaCCParserBaseTest.java (89%) rename {vlog4j-parser => rulewerk-parser}/src/test/resources/base.rls (100%) rename {vlog4j-parser => rulewerk-parser}/src/test/resources/blank.rls (100%) rename {vlog4j-parser => rulewerk-parser}/src/test/resources/facts.rls (100%) rename {vlog4j-rdf => rulewerk-rdf}/LICENSE.txt (100%) rename {vlog4j-rdf => rulewerk-rdf}/pom.xml (88%) rename {vlog4j-rdf/src/main/java/org/semanticweb/vlog4j => rulewerk-rdf/src/main/java/org/semanticweb/rulewerk}/rdf/RdfModelConverter.java (89%) rename {vlog4j-rdf/src/main/java/org/semanticweb/vlog4j => rulewerk-rdf/src/main/java/org/semanticweb/rulewerk}/rdf/RdfValueToTermConverter.java (81%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/collections.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/escapedCharacters.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/exampleFacts.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/labelledBNodes.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/languageTags.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/literalValues.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/relativeURIs.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/unlabelledBNodes.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/input/unnormalizedLiteralValues.ttl (100%) rename {vlog4j-rdf => rulewerk-rdf}/src/test/data/output/.keep (100%) rename {vlog4j-rdf/src/test/java/org/semanticweb/vlog4j => rulewerk-rdf/src/test/java/org/semanticweb/rulewerk}/rdf/RdfTestUtils.java (90%) rename {vlog4j-rdf/src/test/java/org/semanticweb/vlog4j => rulewerk-rdf/src/test/java/org/semanticweb/rulewerk}/rdf/TestConvertRdfFileToFacts.java (94%) 
rename {vlog4j-rdf/src/test/java/org/semanticweb/vlog4j => rulewerk-rdf/src/test/java/org/semanticweb/rulewerk}/rdf/TestReasonOverRdfFacts.java (85%) diff --git a/.gitignore b/.gitignore index 15e338099..8454baf10 100644 --- a/.gitignore +++ b/.gitignore @@ -32,7 +32,7 @@ target/ # Don't apply the above to src/ where Java requires # subdirectories named according to package names. # We do not want to forbid things like "dumpfiles" in -# package names. +# package names. !src/ # Use as directory for local testing code @@ -49,9 +49,9 @@ Thumbs.db # Output of tests and examples *.log -vlog4j-core/src/test/data/output/* -vlog4j-examples/src/main/data/output/* -vlog4j-examples/src/main/data/logs/* -vlog4j-rdf/src/main/data/output/* +rulewerk-core/src/test/data/output/* +rulewerk-examples/src/main/data/output/* +rulewerk-examples/src/main/data/logs/* +rulewerk-rdf/src/main/data/output/* /build-vlog/vlog/ /TAGS diff --git a/LICENSE.txt b/LICENSE.txt index 29f81d812..261eeb9e9 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index dbb5a647b..14a23a5e3 100644 --- a/README.md +++ b/README.md @@ -1,36 +1,36 @@ -VLog4j +Rulewerk ====== -[![Build Status](https://travis-ci.org/knowsys/vlog4j.png?branch=master)](https://travis-ci.org/knowsys/vlog4j) -[![Coverage Status](https://coveralls.io/repos/github/knowsys/vlog4j/badge.svg?branch=master)](https://coveralls.io/github/knowsys/vlog4j?branch=master) -[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.vlog4j/vlog4j-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.vlog4j%22) +[![Build Status](https://travis-ci.org/knowsys/rulewerk.png?branch=master)](https://travis-ci.org/knowsys/rulewerk) +[![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master) +[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22) A Java library based on the [VLog rule engine](https://github.com/karmaresearch/vlog) Installation ------------ -The current release of VLog4j is version 0.5.0. The easiest way of using the library is with Maven. 
Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.5.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` - org.semanticweb.vlog4j - vlog4j-core + org.semanticweb.rulewerk + rulewerk-core 0.5.0 ``` You need to use Java 1.8 or above. Available modules include: -* **vlog4j-core**: essential data models for rules and facts, and essential reasoner functionality -* **vlog4j-parser**: support for processing knowledge bases in [VLog4j syntax](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar) -* **vlog4j-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files -* **vlog4j-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) -* **vlog4j-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API -* **vlog4j-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/vlog4j/wiki/Standalone-client) for VLog4j. 
+* **rulewerk-core**: essential data models for rules and facts, and essential reasoner functionality +* **rulewerk-parser**: support for processing knowledge bases in [Rulewerk syntax](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar) +* **rulewerk-graal**: support for converting rules, facts and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files +* **rulewerk-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning) +* **rulewerk-owlapi**: support for converting rules from OWL ontology, loaded with the OWL API +* **rulewerk-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/rulewerk/wiki/Standalone-client) for Rulewerk. -The released packages use vlog4j-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: +The released packages use rulewerk-base, which packages system-dependent binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, own binaries can be compiled as follows: -* Run [build-vlog-library.sh](https://github.com/knowsys/vlog4j/blob/master/build-vlog-library.sh) or execute the commands in this file manually. 
This will compile a local jar file on your system, copy it to ```./vlog4j-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of vlog4j-base. +* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```./rulewerk-core/lib/jvlog-local.jar```, and install the new jar locally in Maven in place of the distributed version of rulewerk-base. * Run ```mvn install``` to test if the setup works @@ -38,10 +38,10 @@ The released packages use vlog4j-base, which packages system-dependent binaries Documentation ------------- -* The module **vlog4j-examples** includes short example programs that demonstrate various features and use cases -* The GitHub project **[VLog4j Example](https://github.com/knowsys/vlog4j-example)** shows how to use VLog4j in own Maven projects and can be used as a skeleton for own projects -* [JavaDoc](https://knowsys.github.io/vlog4j/) is available online and through the Maven packages. -* A VLog4j [Wiki](https://github.com/knowsys/vlog4j/wiki) is available online, with detailed information about vlog4j usage, the supported rule language [examples](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar), and related publications. +* The module **rulewerk-examples** includes short example programs that demonstrate various features and use cases +* The GitHub project **[Rulewerk Example](https://github.com/knowsys/rulewerk-example)** shows how to use Rulewerk in own Maven projects and can be used as a skeleton for own projects +* [JavaDoc](https://knowsys.github.io/rulewerk/) is available online and through the Maven packages. 
+* A Rulewerk [Wiki](https://github.com/knowsys/rulewerk/wiki) is available online, with detailed information about rulewerk usage, the supported rule language [examples](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar), and related publications. Development ----------- @@ -49,5 +49,5 @@ Development * Pull requests are welcome. * The master branch may require a development version of VLog. Use the script `build-vlog-library.sh` to create and install it on your machine (you may need to delete previous local builds first). -* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.vlog4j.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. +* Users of Eclipse should install the javacc plugin to generate the parser sources. After installing the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes. * We largely follow [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception are the names of private members, which do not usually end in underscores in our code. diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 6fb2310fa..5d2119244 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -1,10 +1,11 @@ -VLog4j Release Notes +Rulewerk Release Notes ==================== -VLog4j v0.6.0 +Rulewerk v0.6.0 ------------- Breaking changes: +* VLog4j is now called Rulewerk. * In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no longer exist. 
It can be replaced by `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` diff --git a/build-vlog-library.sh b/build-vlog-library.sh index 7008b2fdd..02ecd116f 100755 --- a/build-vlog-library.sh +++ b/build-vlog-library.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Script to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar on Unix-like systems +# Script to build unreleased snapshots of karmaresearch/vlog into rulewerk-base jar on Unix-like systems if [ -f "./local_builds/jvlog.jar" ] then @@ -21,6 +21,6 @@ else cd ../../.. fi -mkdir local_builds/jvlog.jar vlog4j-core/lib -cp local_builds/jvlog.jar vlog4j-core/lib/jvlog-local.jar +mkdir local_builds/jvlog.jar rulewerk-core/lib +cp local_builds/jvlog.jar rulewerk-core/lib/jvlog-local.jar mvn initialize -Pdevelopment diff --git a/coverage/pom.xml b/coverage/pom.xml index 16e01e7f8..977046572 100644 --- a/coverage/pom.xml +++ b/coverage/pom.xml @@ -4,8 +4,8 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT @@ -14,33 +14,33 @@ coverage - org.semanticweb.vlog4j - vlog4j-core + org.semanticweb.rulewerk + rulewerk-core 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-rdf + org.semanticweb.rulewerk + rulewerk-rdf 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-owlapi + org.semanticweb.rulewerk + rulewerk-owlapi 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-graal + org.semanticweb.rulewerk + rulewerk-graal 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-parser + org.semanticweb.rulewerk + rulewerk-parser 0.6.0-SNAPSHOT - org.semanticweb.vlog4j - vlog4j-client + org.semanticweb.rulewerk + rulewerk-client 0.6.0-SNAPSHOT diff --git a/pom.xml b/pom.xml index ed6f23606..8ec79cb32 100644 --- a/pom.xml +++ b/pom.xml @@ -5,25 +5,25 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT pom - VLog4j + Rulewerk A Java library for working with the VLog rule engine - 
https://github.com/knowsys/vlog4j + https://github.com/knowsys/rulewerk - - vlog4j-core - vlog4j-rdf - vlog4j-examples - vlog4j-owlapi - vlog4j-graal - vlog4j-parser - vlog4j-client + rulewerk-core + rulewerk-rdf + rulewerk-examples + rulewerk-owlapi + rulewerk-graal + rulewerk-parser + rulewerk-client coverage @@ -38,7 +38,7 @@ - VLog4j Developers + Rulewerk Developers @@ -57,6 +57,11 @@ David Carral david.carral@tu-dresden.de + + maximilian + Maximilian Marx + maximilian.marx@tu-dresden.de + @@ -144,7 +149,7 @@ - org.eclipse.m2e lifecycle-mapping @@ -220,7 +225,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -280,8 +285,8 @@ test - ${project.reporting.outputDirectory}/jacoco-ut @@ -289,7 +294,7 @@ - **/javacc/JavaCCParser.class **/javacc/JavaCCParserConstants.class @@ -303,14 +308,14 @@ - org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} 1.8 - VLog4j homepage]]> + Rulewerk homepage]]> @@ -323,7 +328,7 @@ Publishing javadoc for ${project.artifactId}:${project.version} ${project.reporting.outputDirectory}/apidocs - scm:git:https://github.com/knowsys/vlog4j.git + scm:git:https://github.com/knowsys/rulewerk.git gh-pages @@ -411,9 +416,9 @@ - https://github.com/knowsys/vlog4j.git - scm:git:https://github.com/knowsys/vlog4j.git - scm:git:https://github.com/knowsys/vlog4j.git + https://github.com/knowsys/rulewerk.git + scm:git:https://github.com/knowsys/rulewerk.git + scm:git:https://github.com/knowsys/rulewerk.git diff --git a/vlog4j-client/LICENSE.txt b/rulewerk-client/LICENSE.txt similarity index 100% rename from vlog4j-client/LICENSE.txt rename to rulewerk-client/LICENSE.txt diff --git a/vlog4j-client/pom.xml b/rulewerk-client/pom.xml similarity index 82% rename from vlog4j-client/pom.xml rename to rulewerk-client/pom.xml index 0e7b14f00..cc051d591 100644 --- 
a/vlog4j-client/pom.xml +++ b/rulewerk-client/pom.xml @@ -5,16 +5,16 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-client + rulewerk-client jar - VLog4j Client - Stand-alone VLog4j application + Rulewerk Client + Stand-alone Rulewerk application UTF-8 @@ -23,15 +23,15 @@ ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} ${project.groupId} - vlog4j-parser + rulewerk-parser ${project.version} - + org.slf4j slf4j-log4j12 ${slf4j.version} @@ -63,11 +63,11 @@ shade - standalone-vlog4j-client-${project.version} + standalone-rulewerk-client-${project.version} - org.semanticweb.vlog4j.client.picocli.VLog4jClient + org.semanticweb.rulewerk.client.picocli.RulewerkClient diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java similarity index 92% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index c0f81099c..4984fba5e 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L @@ -26,9 +26,9 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import 
org.semanticweb.rulewerk.core.reasoner.Reasoner; /** * Utility class for interacting with the vlog4j client. @@ -56,7 +56,7 @@ private ClientUtils() { * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using - * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. + * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. * It is also possible to specify a separate log file for this part of the logs. */ public static void configureLogging() { diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java similarity index 98% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index 1fb824007..935dde8fc 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java similarity index 98% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index 5d8fd08a7..1af92ac6a 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package 
org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java similarity index 98% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index b694cb532..5e593b00c 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java similarity index 96% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java index a0535ec79..8663d80d4 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L diff --git a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java similarity index 93% rename from vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java rename to 
rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java index 44969e879..52bc1e777 100644 --- a/vlog4j-client/src/main/java/org/semanticweb/vlog4j/client/picocli/VLog4jClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L @@ -26,14 +26,14 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; import picocli.CommandLine.ArgGroup; import picocli.CommandLine.Command; import picocli.CommandLine.Option; diff --git a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java similarity index 96% rename from vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java rename to 
rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java index 167b66fb4..6cf1df352 100644 --- a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/PrintQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2019 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java similarity index 99% rename from vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java rename to rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java index 17074eb37..7a4b65532 100644 --- a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveModelTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; diff --git a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java similarity index 99% rename from vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java rename to 
rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java index 124511f5d..2ef16cb40 100644 --- a/vlog4j-client/src/test/java/org/semanticweb/vlog4j/client/picocli/SaveQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.client.picocli; +package org.semanticweb.rulewerk.client.picocli; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; diff --git a/vlog4j-core/LICENSE.txt b/rulewerk-core/LICENSE.txt similarity index 100% rename from vlog4j-core/LICENSE.txt rename to rulewerk-core/LICENSE.txt diff --git a/vlog4j-core/pom.xml b/rulewerk-core/pom.xml similarity index 79% rename from vlog4j-core/pom.xml rename to rulewerk-core/pom.xml index 3c51c676d..991c14904 100644 --- a/vlog4j-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -1,65 +1,65 @@ - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.6.0-SNAPSHOT - - - vlog4j-core - jar - - VLog4j Core Components - Core components of VLog4j: reasoner and model - - - 1.3.3-snapshot - - - - - - ${project.groupId} - vlog4j-base - ${karmaresearch.vlog.version} - - - - - - development - - - - - - org.apache.maven.plugins - maven-install-plugin - 2.4 - - - initialize - - install-file - - - ${project.groupId} - vlog4j-base - ${karmaresearch.vlog.version} - jar - ./lib/jvlog-local.jar - - - - - - - - - + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-core + jar + + Rulewerk Core Components + Core components of Rulewerk: reasoner and model + + + 1.3.3-snapshot + + + + + + ${project.groupId} + rulewerk-base + ${karmaresearch.vlog.version} + + + + + + development + + + + + + org.apache.maven.plugins + maven-install-plugin + 2.4 + + + initialize + + install-file + + + ${project.groupId} + rulewerk-base + ${karmaresearch.vlog.version} + jar + ./lib/jvlog-local.jar + + + + + + + + + diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index db42ff58c..a274e91cf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; /*- * #%L @@ -22,8 +22,8 @@ import java.text.MessageFormat; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Predicate; /** * Expression thrown when attempting to load facts for a {@link Predicate} from diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index 6424a028b..afd7ec5bd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; 
+package org.semanticweb.rulewerk.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index 7d26dbd47..bd28395dc 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -18,11 +18,11 @@ * #L% */ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; import java.text.MessageFormat; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; /** * Thrown when an operation that is invalid in current reasoner state is diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java index c87c6ca3b..7d848760a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java index 56fd985a6..d0adc72e8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/exceptions/VLog4jRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.exceptions; +package org.semanticweb.rulewerk.core.exceptions; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 41f7bd3ce..9ce72ce46 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for abstract constants, i.e. 
for constants that represent an diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 550d3fa4e..8251cc869 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for representing conjunctions of {@link Literal}s, i.e., lists of diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java index 850ae6f58..0a9c431cd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java similarity index 95% rename from 
vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java index 888d30f77..83390c1c7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index 954574e1f..398e7811a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -1,6 +1,6 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index d4dfe19e8..fb75afea7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/DatatypeConstant.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for datatype constants, i.e. for constants that represent a diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index d5fd0306e..541f0c598 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 6952de6c3..1ca388565 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import 
org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for existentially quantified variables, i.e., variables that appear diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index 36e7c1fef..ff082b2eb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -1,6 +1,6 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index 0f1296d0f..e10e49fbb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import 
org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for string constants with a language tag, used to represent values diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index 791615697..663ecf4a8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for literals. 
A positive literal is simply an atomic formula, i.e., diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index 297692483..921efca58 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L @@ -22,7 +22,7 @@ import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for {@link TermType#NAMED_NULL} terms. 
A blank is an entity used to diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NegativeLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NegativeLiteral.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java index 1321d77c7..90caac997 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/NegativeLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PositiveLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PositiveLiteral.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java index 90c5f770f..66fa04ad2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PositiveLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index c7b92761b..0a0dc5808 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A Predicate represents a relation between terms. Is uniquely identified by diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 874889211..eb1549f4f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L @@ -22,7 +22,7 @@ import java.util.Map.Entry; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; /** * Registry that manages prefixes and base namespace declarations as used for diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java rename to 
rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java index 0ea7e1f8e..f3d1cc6d7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 804524b80..8f7b4ee33 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -1,6 +1,6 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java index 464397b18..69c6f83c8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Statement.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java index fc556f18e..ced6c05dc 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/StatementVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/SyntaxObject.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/SyntaxObject.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java index f41891f8c..73dcafc12 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/SyntaxObject.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index fb52ad009..0c631d653 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java index 94ca40a9f..9453cb25c 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java index 7ada2c0d4..1dad479be 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Terms.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Terms.java rename to 
rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java index c825c4179..2baf7355e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Terms.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java index d152a3f73..12bc6ab19 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/UniversalVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Interface for universally quantified variables, i.e., variables that appear diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java index 308843bdf..69210b5d0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java @@ 
-1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java index 0820e16de..699a96d41 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index 5e2d141a3..ba4290138 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -27,9 +27,9 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Implements {@link Literal} objects. A literal is a formula of the form diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index d41bfabfb..2f58af65b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Map.Entry; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** * Implementation of the common logic for 
prefix declaration registries. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java index 0ec6e488f..c8040af01 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Term; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java index 8167f43c2..8f24855d3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -26,9 +26,9 @@ import java.util.stream.Stream; import org.apache.commons.lang3.Validate; -import 
org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Simple implementation of {@link Conjunction}. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java index 2a905dcbb..1fcb6bd68 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DataSourceDeclarationImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -21,10 +21,10 @@ */ import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; /** * Basic implementation for {@link DataSourceDeclaration}. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index a366f7fed..6f42312c0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import org.apache.commons.lang3.Validate; @@ -22,8 +22,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /** * Simple implementation of {@link DatatypeConstant}. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java similarity index 86% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java index 685d273a9..953d92c31 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ExistentialVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -20,8 +20,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; /** * Simple implementation of {@link ExistentialVariable}. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index 2ffbfcf28..fcb3e01eb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.ArrayList; @@ -25,19 +25,19 @@ import java.util.Arrays; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; 
+import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /** * This utilities class provides static methods for creating terms and formulas diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java similarity index 81% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java index e1712dd37..60ee41579 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/FactImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -22,10 +22,10 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Standard implementation of the {@link Fact} interface. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 8d0bb26f3..9a112ba09 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -21,8 +21,8 @@ */ import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /** * Simple implementation of {@link LanguageStringConstant}. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index bf1d14f8d..a63f73950 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -27,7 +27,7 @@ import java.util.Map; import java.util.Map.Entry; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; /** * Implementation of {@link PrefixDeclarationRegistry} that is suitable for diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java similarity index 88% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java index 5b3a0adc6..ff41632ae 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NamedNullImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package 
org.semanticweb.rulewerk.core.model.implementation; /* * #%L @@ -20,8 +20,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /** * Implements {@link NamedNull} terms. A null is an entity used to represent diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java similarity index 80% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NegativeLiteralImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index ceb0a145c..740e8af97 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -23,9 +23,9 @@ import java.util.List; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; public class NegativeLiteralImpl extends AbstractLiteralImpl implements NegativeLiteral { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PositiveLiteralImpl.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java similarity index 80% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PositiveLiteralImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index 84c2899e5..83eb4e4e0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PositiveLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -23,9 +23,9 @@ import java.util.List; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; public class PositiveLiteralImpl extends AbstractLiteralImpl implements PositiveLiteral { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 38fac8686..25e9d3c21 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; /*- * #%L @@ -22,7 +22,7 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Predicate; /** * Implementation for {@link Predicate}. Supports predicates of arity 1 or diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java similarity index 84% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java index ef05b14d2..d54bf8512 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RenamedNamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.UUID; @@ -22,8 +22,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; /** * A {@link NamedNull} term that has been renamed during parsing. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java similarity index 88% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java index 4ffbae0de..acd038dc0 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.Set; import java.util.stream.Collectors; @@ -26,13 +26,13 @@ import java.util.stream.Stream; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /** * Implementation for {@link Rule}. 
Represents rules with non-empty heads and diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 2d10dc4a5..6ee191e48 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,11 +1,11 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import java.util.List; import java.util.Map.Entry; import java.util.function.Function; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; /*- * #%L @@ -27,25 +27,25 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Rule; -import 
org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; /** * A utility class with static methods to obtain the correct parsable string diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java similarity index 86% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java index e21cf3e9c..1211841b4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/UniversalVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/AcyclicityNotion.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/AcyclicityNotion.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java index 0731a8838..36676c4ae 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/AcyclicityNotion.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java similarity index 92% rename from 
vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java index 8952a039c..ea3994d5a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java index 93028cca9..1c6d077bd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Correctness.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/CyclicityResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/CyclicityResult.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java index 0ce1fca55..2e289278b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/CyclicityResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index e7b0e8a77..90cc3bb74 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -37,17 +37,17 @@ import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.exceptions.VLog4jException; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.VLog4jException; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import 
org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; /** * A knowledge base with rules, facts, and declarations for loading data from @@ -449,10 +449,10 @@ Map> getFactsByPredicate() { * KnowledgeBase. * * This is essentially - * {@link org.semanticweb.vlog4j.parser.RuleParser#parseInto}, but we need to + * {@link org.semanticweb.rulewerk.parser.RuleParser#parseInto}, but we need to * avoid a circular dependency here -- this is also why we throw * {@link VLog4jException} instead of - * {@link org.semanticweb.vlog4j.parser.ParsingException}. + * {@link org.semanticweb.rulewerk.parser.ParsingException}. */ @FunctionalInterface public interface AdditionalInputParser { diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java index ddbd4fa7a..127504d46 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseListener.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -22,7 +22,7 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.Statement; /** * Listener to {@link KnowledgeBase} content change events. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java index 9b9ca1e1d..fa1a54d45 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java index 3438c5e34..56cf95bcf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryAnswerCount.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java similarity index 92% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java index 743497b26..99d08f05e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/QueryResultIterator.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -22,7 +22,7 @@ import java.util.Iterator; -import org.semanticweb.vlog4j.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.QueryResult; /** * Iterator for {@link QueryResult}s. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 444d4c615..04138ef5a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -1,20 +1,20 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import 
org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java similarity index 97% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java index 5a30d7359..bf22ef019 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /* * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java similarity index 91% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java index 142d2ea03..3aecb060a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L diff 
--git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 7998dd466..fee712e49 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -25,7 +25,7 @@ import java.util.Arrays; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java similarity index 82% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java index 1d1cd1575..9c48bbb10 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; /** * Iterator that represents an empty query result. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java similarity index 98% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 8c5fb7e21..b0761e238 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java similarity index 95% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index c628cd023..13a7066a8 100644 --- 
a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -22,8 +22,8 @@ import java.util.Arrays; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; /** * A {@link DataSource} for representing a large number of facts that were diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index 25d089fee..1f2f943ee 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -23,19 +23,19 @@ import java.util.Collection; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import 
org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; /** * Utility class with static methods for converting from VLog API model objects - * ({@code org.semanticweb.vlog4j.core.model}) to internal VLog model objects + * ({@code org.semanticweb.rulewerk.core.model}) to internal VLog model objects * ({@code karmaresearch.vlog}). 
* * @author Irina Dragoste diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java similarity index 88% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java index 68deeb6f0..27814ab4e 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -1,7 +1,7 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; /*- * #%L diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index b02bf4153..6727fd558 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package 
org.semanticweb.rulewerk.core.reasoner.implementation; /* * #%L @@ -22,8 +22,8 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; /** * Implements {@link QueryResult}s. diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java similarity index 94% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index eb3ce09ea..43e1c44b1 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -25,7 +25,7 @@ import java.util.Arrays; import java.util.Optional; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index 
2a6269ebb..ddde0498a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -24,8 +24,8 @@ import java.io.IOException; import java.util.UUID; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.implementation.RenamedNamedNull; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** * A class that implements skolemization of named null names. The same diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java similarity index 96% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 89db26939..b105f8d82 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -27,8 +27,8 @@ import org.apache.commons.lang3.Validate; import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import 
org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * A SparqlQueryResultDataSource provide the results of a SPARQL query on a diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java similarity index 84% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 400943c75..b83cc7a12 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,15 +20,15 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.ExistentialVariable; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import 
org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; /** * A visitor that converts {@link Term}s of different types to corresponding diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java similarity index 90% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java index 0cab0e979..d03b1b118 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSource; /** * Abstract base class for VLog-specific data sources. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java similarity index 89% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java index e121399a9..c66b3094d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -30,22 +30,22 @@ import java.util.Map.Entry; import java.util.Set; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.StatementVisitor; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import 
org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; /** * Class for organizing a Knowledge Base using vLog-specific data structures. 
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java index 043aea636..835fe699a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /* * #%L @@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; import karmaresearch.vlog.Term; import karmaresearch.vlog.TermQueryResultIterator; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java similarity index 93% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index cdec467b5..a6b48b5bf 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import java.io.IOException; import java.io.OutputStream; @@ -10,30 +10,30 @@ import java.util.Set; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Statement; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.core.reasoner.AcyclicityNotion; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.CyclicityResult; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.QueryAnswerCount; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import 
org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.AcyclicityNotion; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.CyclicityResult; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java similarity index 86% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java index 3be4fdebf..3864b4fb7 100644 
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /* * #%L @@ -23,19 +23,19 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** * Utility class with static methods for converting from VLog internal model * ({@code karmaresearch.vlog} objects) to VLog API model - * ({@code org.semanticweb.vlog4j.core.model.api}) objects. + * ({@code org.semanticweb.rulewerk.core.model.api}) objects. 
* * @author Irina Dragoste * diff --git a/vlog4j-core/src/test/data/input/binaryFacts.csv b/rulewerk-core/src/test/data/input/binaryFacts.csv similarity index 100% rename from vlog4j-core/src/test/data/input/binaryFacts.csv rename to rulewerk-core/src/test/data/input/binaryFacts.csv diff --git a/vlog4j-core/src/test/data/input/constantD.csv b/rulewerk-core/src/test/data/input/constantD.csv similarity index 100% rename from vlog4j-core/src/test/data/input/constantD.csv rename to rulewerk-core/src/test/data/input/constantD.csv diff --git a/vlog4j-core/src/test/data/input/empty.csv b/rulewerk-core/src/test/data/input/empty.csv similarity index 100% rename from vlog4j-core/src/test/data/input/empty.csv rename to rulewerk-core/src/test/data/input/empty.csv diff --git a/vlog4j-core/src/test/data/input/invalidFormatNtFacts.nt b/rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt similarity index 100% rename from vlog4j-core/src/test/data/input/invalidFormatNtFacts.nt rename to rulewerk-core/src/test/data/input/invalidFormatNtFacts.nt diff --git a/vlog4j-core/src/test/data/input/ternaryFacts.nt b/rulewerk-core/src/test/data/input/ternaryFacts.nt similarity index 100% rename from vlog4j-core/src/test/data/input/ternaryFacts.nt rename to rulewerk-core/src/test/data/input/ternaryFacts.nt diff --git a/vlog4j-core/src/test/data/input/ternaryFactsZipped.nt.gz b/rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz similarity index 100% rename from vlog4j-core/src/test/data/input/ternaryFactsZipped.nt.gz rename to rulewerk-core/src/test/data/input/ternaryFactsZipped.nt.gz diff --git a/vlog4j-core/src/test/data/input/unaryFacts.csv b/rulewerk-core/src/test/data/input/unaryFacts.csv similarity index 100% rename from vlog4j-core/src/test/data/input/unaryFacts.csv rename to rulewerk-core/src/test/data/input/unaryFacts.csv diff --git a/vlog4j-core/src/test/data/input/unaryFactsCD.csv b/rulewerk-core/src/test/data/input/unaryFactsCD.csv similarity index 100% rename from 
vlog4j-core/src/test/data/input/unaryFactsCD.csv rename to rulewerk-core/src/test/data/input/unaryFactsCD.csv diff --git a/vlog4j-core/src/test/data/input/unaryFactsZipped.csv.gz b/rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz similarity index 100% rename from vlog4j-core/src/test/data/input/unaryFactsZipped.csv.gz rename to rulewerk-core/src/test/data/input/unaryFactsZipped.csv.gz diff --git a/vlog4j-core/src/test/data/output/.keep b/rulewerk-core/src/test/data/output/.keep similarity index 100% rename from vlog4j-core/src/test/data/output/.keep rename to rulewerk-core/src/test/data/output/.keep diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java index db8fdcf30..2e6dba525 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -29,14 +29,14 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import 
org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class ConjunctionImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java similarity index 88% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index c02772192..880172977 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -30,15 +30,15 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import 
org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; public class DataSourceDeclarationTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java similarity index 81% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java index 0de3182d3..70763c3a4 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/FactTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,12 +24,12 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import 
org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.FactImpl; public class FactTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 5aad3d706..c6202e864 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,9 +24,9 @@ import org.junit.Before; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; public class MergingPrefixDeclarationRegistryTest { private MergingPrefixDeclarationRegistry prefixDeclarations; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java index d22881e84..c97f71504 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/NegativeLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -28,15 +28,15 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.NegativeLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.NegativeLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; public class NegativeLiteralImplTest { diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java index 7c8d791a7..265f096c9 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PositiveLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -26,15 +26,15 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.NegativeLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.NegativeLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import 
org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; public class PositiveLiteralImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java index 508f4c90d..7c398b37b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -25,9 +25,9 @@ import static org.junit.Assert.assertNotEquals; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; public class PredicateImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java similarity index 90% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java index 0a406ec18..47c759252 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -27,17 +27,17 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; public class RuleImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java similarity index 87% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java index 
5093d5116..77f47b7b6 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.model; +package org.semanticweb.rulewerk.core.model; /*- * #%L @@ -24,17 +24,17 @@ import static org.junit.Assert.assertNotEquals; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; -import org.semanticweb.vlog4j.core.model.api.LanguageStringConstant; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.ExistentialVariableImpl; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import 
org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; public class TermImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java similarity index 92% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 8e0531a67..1305e8acc 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L @@ -27,10 +27,10 @@ import org.junit.Before; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; public class KnowledgeBaseTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java similarity index 92% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java index 
c08ad616c..6b51bfe7a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -34,13 +34,13 @@ import org.junit.BeforeClass; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; public class LoggingTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java similarity index 91% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java index 226aee845..8e64d2915 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/ReasonerTimeoutTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -33,13 +33,13 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.Timeout; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} works as diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index db5c356ae..5e073a77e 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -32,16 +32,16 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class AddDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java similarity index 94% rename from 
vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java index 40c21f29d..be9efb6c2 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -36,19 +36,19 @@ import org.junit.Assert; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import 
org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; public class AnswerQueryTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java similarity index 95% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index 930fdf785..22cc04bbb 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -26,7 +26,7 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; public class CsvFileDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java rename to 
rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index a2b1b8036..945c4482a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -33,13 +33,13 @@ import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVParser; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; /** * Utility class for reading from and writing to data source files. 
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java similarity index 89% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index cb173d5e4..1460a5a7f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -31,16 +31,16 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import 
org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; public class GeneratedAnonymousIndividualsTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java similarity index 91% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index 4ef5c7322..a8b773a8d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -30,17 +30,17 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; 
+import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; public class ModelToVLogConverterTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java similarity index 95% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java index 4e17d8bdf..9c5a993e8 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -25,16 +25,16 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import 
org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; public class QueryAnswerCountTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java index 3d8eea89b..af30f5a3f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -32,20 +32,20 @@ import java.util.Set; import org.junit.Test; -import 
org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class QueryAnsweringCorrectnessTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java 
similarity index 80% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java index 642f38985..0335dd845 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -28,11 +28,11 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultImpl; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; public class QueryResultImplTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java similarity index 88% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java index 1ec594328..6bfbdf501 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -26,9 +26,9 @@ import java.util.List; import java.util.Set; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; /** * Utility class with static methods for collecting the results of a query for diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index ba238ae78..ba4730f83 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; @@ -26,7 +26,7 @@ import java.io.IOException; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import 
org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; public class RdfFileDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java similarity index 95% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index bebe220d2..da644f50d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -26,7 +26,7 @@ import org.junit.Before; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.NamedNull; public class SkolemizationTest { private Skolemization skolemization; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index bd950350d..fdc5ba789 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -30,8 +30,8 @@ import org.apache.commons.lang3.StringUtils; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class SparqlQueryResultDataSourceTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java similarity index 80% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java index b4e3a5aec..d27cb9282 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerBasics.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -30,16 +30,16 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import 
org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; public class VLogReasonerBasics { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java similarity index 88% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 084c1b321..dae941489 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertEquals; @@ -32,17 +32,17 @@ import java.util.Set; import org.junit.Test; -import 
org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerCombinedInputs { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java similarity index 85% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java index 2b7d85c58..a7d1f066a 100644 --- 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -31,17 +31,17 @@ import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerCsvInput { diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java index 7f1c62838..3c4e023f3 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -31,12 +31,12 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; public class VLogReasonerCsvOutput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java similarity index 83% rename from 
vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java index 10c4226fa..213889d55 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -28,16 +28,16 @@ import java.util.Arrays; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class VLogReasonerNegation { diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java similarity index 90% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java index d278116a3..601e6f57b 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -32,13 +32,13 @@ import org.junit.Ignore; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerRdfInput { diff --git 
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java index 521812028..f5cb44aab 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -30,14 +30,14 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import 
org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; public class VLogReasonerSparqlInput { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java similarity index 93% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java index 6fb85e3c8..a524ec507 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -34,22 +34,22 @@ import java.util.Set; import org.junit.Test; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import 
org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; public class VLogReasonerStateTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java similarity index 85% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 6dff4c70f..dcaf16a5a 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; import static org.junit.Assert.*; import static org.mockito.Mockito.*; @@ -17,20 +17,20 @@ import org.junit.Before; import org.junit.Test; import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.UniversalVariable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import 
org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java similarity index 87% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java index 64dd2469d..db398d51d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /*- * #%L @@ -23,12 +23,12 @@ import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.AbstractConstant; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; 
+import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; public class VLogToModelConverterTest { diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java similarity index 94% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java index c1f4c8580..8c1f38594 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -27,7 +27,7 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java index 3f4a364db..5d9f00983 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/LargeAritiesTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; import static org.junit.Assert.assertArrayEquals; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java index 052841090..b4b89ae14 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/StratifiedNegationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -25,7 +25,7 @@ import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.vlog.VLogExpressions; +import org.semanticweb.rulewerk.core.reasoner.vlog.VLogExpressions; import karmaresearch.vlog.Atom; import karmaresearch.vlog.EDBConfigurationException; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java index a580baeb4..676dd79dd 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvFileTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -28,7 +28,7 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java index cbe1d005b..1d9b7ca0d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /* * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java similarity index 96% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java index 7eaf1d6ab..622888073 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L @@ -28,7 +28,7 @@ import java.util.List; import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.implementation.FileDataSourceTestUtils; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSourceTestUtils; import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.Atom; diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java similarity index 98% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java index 5ddb466a7..d03bf4fac 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java similarity index 97% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java index f2c4614ae..e61d46421 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java similarity index 98% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java index 801c43e65..df38a52ab 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java similarity index 98% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java index 0124f6eb6..428e31226 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.core.reasoner.vlog; /*- * #%L diff --git a/vlog4j-examples/LICENSE.txt b/rulewerk-examples/LICENSE.txt similarity index 100% rename from vlog4j-examples/LICENSE.txt rename to rulewerk-examples/LICENSE.txt diff --git 
a/vlog4j-examples/README.md b/rulewerk-examples/README.md similarity index 100% rename from vlog4j-examples/README.md rename to rulewerk-examples/README.md diff --git a/vlog4j-examples/pom.xml b/rulewerk-examples/pom.xml similarity index 79% rename from vlog4j-examples/pom.xml rename to rulewerk-examples/pom.xml index 1e23eb09e..216564049 100644 --- a/vlog4j-examples/pom.xml +++ b/rulewerk-examples/pom.xml @@ -1,90 +1,89 @@ - - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.6.0-SNAPSHOT - - - vlog4j-examples - jar - - VLog4j Examples - Contains examples and usage instructions describing the basic functionality of VLog4j - - - - ${project.groupId} - vlog4j-core - ${project.version} - - - ${project.groupId} - vlog4j-owlapi - ${project.version} - - - ${project.groupId} - vlog4j-rdf - ${project.version} - - - ${project.groupId} - vlog4j-graal - ${project.version} - - - ${project.groupId} - vlog4j-parser - ${project.version} - - - org.slf4j - slf4j-log4j12 - ${slf4j.version} - - - - - org.openrdf.sesame - sesame-rio-turtle - - ${openrdf.sesame.version} - - - - - org.openrdf.sesame - sesame-rio-rdfxml - - ${openrdf.sesame.version} - - - - fr.lirmm.graphik - graal-io-dlgp - ${graal.version} - - - - - - - - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 - - true - true - - - - - - + + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-examples + jar + + Rulewerk Examples + Contains examples and usage instructions describing the basic functionality of Rulewerk + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + + + ${project.groupId} + rulewerk-graal + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + + + + + org.openrdf.sesame + sesame-rio-turtle + + ${openrdf.sesame.version} + + + + + 
org.openrdf.sesame + sesame-rio-rdfxml + + ${openrdf.sesame.version} + + + + fr.lirmm.graphik + graal-io-dlgp + ${graal.version} + + + + + + + + + org.codehaus.mojo + cobertura-maven-plugin + 2.7 + + true + true + + + + + diff --git a/vlog4j-examples/src/main/data/.gitignore b/rulewerk-examples/src/main/data/.gitignore similarity index 100% rename from vlog4j-examples/src/main/data/.gitignore rename to rulewerk-examples/src/main/data/.gitignore diff --git a/vlog4j-examples/src/main/data/input/bicycleEDB.csv.gz b/rulewerk-examples/src/main/data/input/bicycleEDB.csv.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/bicycleEDB.csv.gz rename to rulewerk-examples/src/main/data/input/bicycleEDB.csv.gz diff --git a/vlog4j-examples/src/main/data/input/counting-triangles.rls b/rulewerk-examples/src/main/data/input/counting-triangles.rls similarity index 100% rename from vlog4j-examples/src/main/data/input/counting-triangles.rls rename to rulewerk-examples/src/main/data/input/counting-triangles.rls diff --git a/vlog4j-examples/src/main/data/input/doid.nt.gz b/rulewerk-examples/src/main/data/input/doid.nt.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/doid.nt.gz rename to rulewerk-examples/src/main/data/input/doid.nt.gz diff --git a/vlog4j-examples/src/main/data/input/doid.rls b/rulewerk-examples/src/main/data/input/doid.rls similarity index 100% rename from vlog4j-examples/src/main/data/input/doid.rls rename to rulewerk-examples/src/main/data/input/doid.rls diff --git a/vlog4j-examples/src/main/data/input/graal/doid-example.dlgp b/rulewerk-examples/src/main/data/input/graal/doid-example.dlgp similarity index 100% rename from vlog4j-examples/src/main/data/input/graal/doid-example.dlgp rename to rulewerk-examples/src/main/data/input/graal/doid-example.dlgp diff --git a/vlog4j-examples/src/main/data/input/graal/example.dlgp b/rulewerk-examples/src/main/data/input/graal/example.dlgp similarity index 100% rename from 
vlog4j-examples/src/main/data/input/graal/example.dlgp rename to rulewerk-examples/src/main/data/input/graal/example.dlgp diff --git a/vlog4j-examples/src/main/data/input/hasPartEDB.csv.gz b/rulewerk-examples/src/main/data/input/hasPartEDB.csv.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/hasPartEDB.csv.gz rename to rulewerk-examples/src/main/data/input/hasPartEDB.csv.gz diff --git a/vlog4j-examples/src/main/data/input/owl/bike.owl b/rulewerk-examples/src/main/data/input/owl/bike.owl similarity index 100% rename from vlog4j-examples/src/main/data/input/owl/bike.owl rename to rulewerk-examples/src/main/data/input/owl/bike.owl diff --git a/vlog4j-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf b/rulewerk-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf similarity index 100% rename from vlog4j-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf rename to rulewerk-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf diff --git a/vlog4j-examples/src/main/data/input/ternaryBicycleEDB.nt.gz b/rulewerk-examples/src/main/data/input/ternaryBicycleEDB.nt.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/ternaryBicycleEDB.nt.gz rename to rulewerk-examples/src/main/data/input/ternaryBicycleEDB.nt.gz diff --git a/vlog4j-examples/src/main/data/input/wheelEDB.csv.gz b/rulewerk-examples/src/main/data/input/wheelEDB.csv.gz similarity index 100% rename from vlog4j-examples/src/main/data/input/wheelEDB.csv.gz rename to rulewerk-examples/src/main/data/input/wheelEDB.csv.gz diff --git a/vlog4j-examples/src/main/data/output/.keep b/rulewerk-examples/src/main/data/output/.keep similarity index 100% rename from vlog4j-examples/src/main/data/output/.keep rename to rulewerk-examples/src/main/data/output/.keep diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java 
b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java similarity index 92% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index 0e7d18b32..838a261f5 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -22,11 +22,11 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how to integrate and compare the contents of two SPARQL diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java similarity index 89% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java index a97e5438e..fede387b0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/CountingTriangles.java +++ 
b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; import java.io.FileInputStream; @@ -24,10 +24,10 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * In this example we count the number of triangles in the reflexive diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java similarity index 86% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java index e919be73c..2f895847e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/DoidExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -25,12 +25,12 @@ import java.util.Arrays; import java.util.List; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import 
org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example reasons about human diseases, based on information from the diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java similarity index 87% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index f39fbd664..dafb60680 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -28,15 +28,15 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; -import org.semanticweb.vlog4j.core.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Correctness; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import 
org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; public final class ExamplesUtils { @@ -61,7 +61,7 @@ private ExamplesUtils() { * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using - * {@link Reasoner#setLogLevel(org.semanticweb.vlog4j.core.reasoner.LogLevel)}. + * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. * It is also possible to specify a separate log file for this part of the logs. */ public static void configureLogging() { diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java similarity index 85% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 3a1702e91..6ea419e31 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -22,14 +22,14 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import 
org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.InMemoryDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how to reason efficiently with data sets generated in diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java similarity index 90% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index b269047b7..9591dd3f0 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples; +package org.semanticweb.rulewerk.examples; /*- * #%L @@ -22,11 +22,11 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import 
org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example demonstrates the basic usage of VLog4j for rule reasoning. We diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java similarity index 87% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java index d493a3eb5..9fcb968a3 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromCsvFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -22,14 +22,14 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import 
org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how facts can be imported from files in the CSV format. diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java similarity index 86% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java index 3a2cd3f8f..8e974a814 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromRdfFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -22,15 +22,15 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import 
org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows how facts can be imported from files in the RDF N-Triples diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java similarity index 86% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index fef9cfb81..6f3a92d3f 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -26,20 +26,20 @@ import java.util.LinkedHashSet; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import 
org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.vlog4j.examples.ExamplesUtils; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.examples.ExamplesUtils; /** * This is a simple example of adding data from the result of a SPARQL query on diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java similarity index 92% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index 4a429bedb..fdf14be9c 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -23,11 +23,11 @@ import java.io.IOException; import org.eclipse.jdt.annotation.Nullable; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This class exemplifies setting a log file and log level for VLog reasoner diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java similarity index 88% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java index f9b46ff59..387893b9d 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.core; +package org.semanticweb.rulewerk.examples.core; /*- * #%L @@ -22,13 +22,13 @@ import java.io.IOException; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import 
org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; /** * This example shows non-termination of the Skolem Chase, versus termination of diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java similarity index 93% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 07bfafa49..ccd12e5ec 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.graal; +package org.semanticweb.rulewerk.examples.graal; /*- * #%L @@ -27,11 +27,11 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import 
org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; +import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; import fr.lirmm.graphik.graal.api.core.Atom; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java similarity index 91% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index 497acef27..55e498784 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.graal; +package org.semanticweb.rulewerk.examples.graal; /*- * #%L @@ -24,13 +24,13 @@ import java.util.ArrayList; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; 
+import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java similarity index 84% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index ecb80b742..9e7b7504e 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.graal; +package org.semanticweb.rulewerk.examples.graal; /*- * #%L @@ -24,21 +24,21 @@ import java.io.IOException; import java.net.URL; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.NegativeLiteral; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.RdfFileDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.DoidExample; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import 
org.semanticweb.vlog4j.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.DoidExample; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java similarity index 84% rename from vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index f738449f4..67be04091 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.owlapi; +package org.semanticweb.rulewerk.examples.owlapi; /*- * #%L @@ -29,18 +29,18 @@ import 
org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.owlapi.OwlToRulesConverter; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; /** * This example shows how vlog4j-owlapi library (class diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java similarity index 87% rename from 
vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java rename to rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index cbb848562..d1aaa22e3 100644 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.examples.rdf; +package org.semanticweb.rulewerk.examples.rdf; /*- * #%L @@ -37,19 +37,19 @@ import org.openrdf.rio.RDFParser; import org.openrdf.rio.Rio; import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.vlog4j.examples.ExamplesUtils; -import org.semanticweb.vlog4j.parser.ParsingException; -import org.semanticweb.vlog4j.parser.RuleParser; -import org.semanticweb.vlog4j.rdf.RdfModelConverter; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import 
org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** * This example shows how vlog4j-rdf library's utility class diff --git a/vlog4j-examples/src/main/logs/.keep b/rulewerk-examples/src/main/logs/.keep similarity index 100% rename from vlog4j-examples/src/main/logs/.keep rename to rulewerk-examples/src/main/logs/.keep diff --git a/vlog4j-graal/LICENSE.txt b/rulewerk-graal/LICENSE.txt similarity index 100% rename from vlog4j-graal/LICENSE.txt rename to rulewerk-graal/LICENSE.txt diff --git a/vlog4j-graal/pom.xml b/rulewerk-graal/pom.xml similarity index 80% rename from vlog4j-graal/pom.xml rename to rulewerk-graal/pom.xml index 7487cf100..f58afbd16 100644 --- a/vlog4j-graal/pom.xml +++ b/rulewerk-graal/pom.xml @@ -4,15 +4,15 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-graal + rulewerk-graal jar - VLog4J Graal Import Components + Rulewerk Graal Import Components Components to import Graal data structures. 
@@ -29,7 +29,7 @@ ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConjunctiveQueryToRule.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java similarity index 90% rename from vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConjunctiveQueryToRule.java rename to rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java index a740f6274..773bed754 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConjunctiveQueryToRule.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- @@ -24,12 +24,12 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java similarity index 96% rename from vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConvertException.java rename to 
rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index 33646fbf4..e02365c20 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- * #%L diff --git a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java similarity index 96% rename from vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java rename to rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java index 5b93b173a..3a03c5993 100644 --- a/vlog4j-graal/src/main/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- * #%L @@ -27,13 +27,13 @@ import java.util.Set; import org.apache.commons.lang3.StringUtils; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import 
org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import fr.lirmm.graphik.graal.api.core.AtomSet; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; diff --git a/vlog4j-graal/src/test/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java similarity index 96% rename from vlog4j-graal/src/test/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverterTest.java rename to rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java index 3642c100e..3e81a6909 100644 --- a/vlog4j-graal/src/test/java/org/semanticweb/vlog4j/graal/GraalToVLog4JModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.graal; +package org.semanticweb.rulewerk.graal; /*- * #%L @@ -27,13 +27,13 @@ import java.util.Collections; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; import 
fr.lirmm.graphik.graal.api.io.ParseException; diff --git a/vlog4j-owlapi/LICENSE.txt b/rulewerk-owlapi/LICENSE.txt similarity index 100% rename from vlog4j-owlapi/LICENSE.txt rename to rulewerk-owlapi/LICENSE.txt diff --git a/vlog4j-owlapi/pom.xml b/rulewerk-owlapi/pom.xml similarity index 79% rename from vlog4j-owlapi/pom.xml rename to rulewerk-owlapi/pom.xml index 3433c9bee..6e9fab4c3 100644 --- a/vlog4j-owlapi/pom.xml +++ b/rulewerk-owlapi/pom.xml @@ -1,36 +1,36 @@ - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.6.0-SNAPSHOT - - - vlog4j-owlapi - jar - - VLog4j OWL API Support - Bindings and utilities for working with OWL ontologies using the OWL API - - - - net.sourceforge.owlapi - owlapi-apibinding - ${owlapi.version} - - - net.sourceforge.owlapi - owlapi-api - ${owlapi.version} - - - ${project.groupId} - vlog4j-core - ${project.version} - - - - + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.6.0-SNAPSHOT + + + rulewerk-owlapi + jar + + Rulewerk OWL API Support + Bindings and utilities for working with OWL ontologies using the OWL API + + + + net.sourceforge.owlapi + owlapi-apibinding + ${owlapi.version} + + + net.sourceforge.owlapi + owlapi-api + ${owlapi.version} + + + ${project.groupId} + rulewerk-core + ${project.version} + + + + diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java similarity index 96% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java index 3285795c7..fdee7057b 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java @@ -1,4 +1,4 @@ -package 
org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -29,11 +29,11 @@ import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLClassExpressionVisitor; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Abstract base class for converters that create rules from OWL class diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java similarity index 95% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java index 027ebb4af..d77a95389 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -42,10 +42,10 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import 
org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Helper class for transforming OWL class expressions that occur as subclasses diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java similarity index 96% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java index 79dc8b3cf..4a958d114 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -42,9 +42,9 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Helper class for transforming OWL class expressions that occur as diff --git 
a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java similarity index 95% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 09e788f6a..f16abe0f0 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; import java.util.ArrayList; import java.util.Arrays; @@ -68,21 +68,21 @@ import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.SWRLRule; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Literal; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.ExistentialVariableImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.UniversalVariableImpl; +import 
org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; /** * Class for converting OWL axioms to rules. 
diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java similarity index 96% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index d3ddbf4cf..254da4dcd 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java similarity index 90% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 02b63aae2..2a6f7ea05 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; import java.io.UnsupportedEncodingException; import java.math.BigInteger; @@ -35,16 +35,16 @@ import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import 
org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.FactImpl; -import org.semanticweb.vlog4j.core.model.implementation.PositiveLiteralImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.owlapi.AbstractClassToRuleConverter.SimpleConjunction; /** * Utility class for helper functions that are used to convert OWL API objects diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java similarity index 92% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index 2def20dc8..e5386caa4 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L 
@@ -23,8 +23,8 @@ import java.util.Set; import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Rule; /** * Class for converting OWL ontologies to rules. diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java similarity index 98% rename from vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java rename to rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java index dd101601b..6ed6fee3a 100644 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L @@ -42,13 +42,13 @@ import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import 
org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class OwlAxiomToRulesConverterTest { diff --git a/vlog4j-parser/LICENSE.txt b/rulewerk-parser/LICENSE.txt similarity index 100% rename from vlog4j-parser/LICENSE.txt rename to rulewerk-parser/LICENSE.txt diff --git a/vlog4j-parser/pom.xml b/rulewerk-parser/pom.xml similarity index 93% rename from vlog4j-parser/pom.xml rename to rulewerk-parser/pom.xml index b8a7b07f4..70df4a5b6 100644 --- a/vlog4j-parser/pom.xml +++ b/rulewerk-parser/pom.xml @@ -6,14 +6,14 @@ 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-parser + rulewerk-parser - VLog4j Parser + Rulewerk Parser http://maven.apache.org UTF-8 @@ -21,7 +21,7 @@ ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} @@ -36,7 +36,7 @@ ruleparser - ${basedir}/src/main/java/org/semanticweb/vlog4j/parser/javacc/ + ${basedir}/src/main/java/org/semanticweb/rulewerk/parser/javacc/ javacc diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java similarity index 89% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index bd5b14c24..a6cd79f27 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ConfigurableLiteralHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,8 +20,8 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.Term; -import 
org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing a configurable literal expression. diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java similarity index 89% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java index bbc6a359c..bf89afe17 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSource; /** * Handler for parsing a custom Data Source declaration. 
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java similarity index 91% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java index eec3b4fcd..12ad24f5e 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DatatypeConstantHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,7 +20,7 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; /** * Handler for parsing a custom Datatype constant. diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java similarity index 74% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index 625f6f87c..f34bbc8c8 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,12 +20,12 @@ * #L% */ -import org.semanticweb.vlog4j.core.model.implementation.Serializer; -import org.semanticweb.vlog4j.parser.datasources.CsvFileDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.datasources.RdfFileDataSourceDeclarationHandler; 
-import org.semanticweb.vlog4j.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; -import org.semanticweb.vlog4j.parser.directives.ImportFileDirectiveHandler; -import org.semanticweb.vlog4j.parser.directives.ImportFileRelativeDirectiveHandler; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.directives.ImportFileDirectiveHandler; +import org.semanticweb.rulewerk.parser.directives.ImportFileRelativeDirectiveHandler; /** * Default parser configuration. Registers default data sources. diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java similarity index 98% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java index 51190723e..4d1b77764 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveArgument.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -24,7 +24,7 @@ import java.util.Optional; import java.util.function.Function; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Term; /** * A tagged union representing the possible types allowed to appear as arguments diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java 
b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java similarity index 95% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index 69d772f70..fca86b07b 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -28,11 +28,11 @@ import java.util.List; import java.util.NoSuchElementException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParser; -import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Handler for parsing a custom directive. 
diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java similarity index 90% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index e5e3ddba3..4319a79ab 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.vlog4j.core.exceptions.PrefixDeclarationException; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.implementation.AbstractPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.AbstractPrefixDeclarationRegistry; /** * Implementation of {@link PrefixDeclarationRegistry} that is used when parsing diff --git a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java similarity index 90% rename from vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java rename to rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 861781378..22b268165 100644 --- a/vlog4j-parser/src/main/java/org/semanticweb/vlog4j/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java 
@@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.parser; +package org.semanticweb.rulewerk.parser; /*- * #%L @@ -25,15 +25,15 @@ import java.util.List; import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.DataSource; -import org.semanticweb.vlog4j.core.model.api.DataSourceDeclaration; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; -import org.semanticweb.vlog4j.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; /** * Class to keep parser configuration. @@ -146,7 +146,7 @@ public Constant parseDatatypeConstant(final String lexicalForm, final String dat /** * Check if a handler for this - * {@link org.semanticweb.vlog4j.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter} + * {@link org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter} * is registered * * @param delimiter delimiter to check. 
@@ -266,7 +266,7 @@ public KnowledgeBase parseDirectiveStatement(String name, List4.0.0 - org.semanticweb.vlog4j - vlog4j-parent + org.semanticweb.rulewerk + rulewerk-parent 0.6.0-SNAPSHOT - vlog4j-rdf + rulewerk-rdf jar - VLog4j RDF Support + Rulewerk RDF Support Bindings and utilities for working with RDF data ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java similarity index 89% rename from vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java rename to rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index 0c58bb826..e7473f587 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -31,13 +31,13 @@ import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; /** 
* Class for converting RDF {@link Model}s to {@link PositiveLiteral} sets. diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java similarity index 81% rename from vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java rename to rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index 361da7991..59b0ac934 100644 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -26,12 +26,12 @@ import org.openrdf.model.Value; import org.openrdf.model.datatypes.XMLDatatypeUtil; import org.openrdf.rio.ntriples.NTriplesUtil; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.NamedNullImpl; -import org.semanticweb.vlog4j.core.model.implementation.AbstractConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.DatatypeConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; final class RdfValueToTermConverter { diff --git a/vlog4j-rdf/src/test/data/input/collections.ttl b/rulewerk-rdf/src/test/data/input/collections.ttl similarity index 100% rename 
from vlog4j-rdf/src/test/data/input/collections.ttl rename to rulewerk-rdf/src/test/data/input/collections.ttl diff --git a/vlog4j-rdf/src/test/data/input/escapedCharacters.ttl b/rulewerk-rdf/src/test/data/input/escapedCharacters.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/escapedCharacters.ttl rename to rulewerk-rdf/src/test/data/input/escapedCharacters.ttl diff --git a/vlog4j-rdf/src/test/data/input/exampleFacts.ttl b/rulewerk-rdf/src/test/data/input/exampleFacts.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/exampleFacts.ttl rename to rulewerk-rdf/src/test/data/input/exampleFacts.ttl diff --git a/vlog4j-rdf/src/test/data/input/labelledBNodes.ttl b/rulewerk-rdf/src/test/data/input/labelledBNodes.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/labelledBNodes.ttl rename to rulewerk-rdf/src/test/data/input/labelledBNodes.ttl diff --git a/vlog4j-rdf/src/test/data/input/languageTags.ttl b/rulewerk-rdf/src/test/data/input/languageTags.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/languageTags.ttl rename to rulewerk-rdf/src/test/data/input/languageTags.ttl diff --git a/vlog4j-rdf/src/test/data/input/literalValues.ttl b/rulewerk-rdf/src/test/data/input/literalValues.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/literalValues.ttl rename to rulewerk-rdf/src/test/data/input/literalValues.ttl diff --git a/vlog4j-rdf/src/test/data/input/relativeURIs.ttl b/rulewerk-rdf/src/test/data/input/relativeURIs.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/relativeURIs.ttl rename to rulewerk-rdf/src/test/data/input/relativeURIs.ttl diff --git a/vlog4j-rdf/src/test/data/input/unlabelledBNodes.ttl b/rulewerk-rdf/src/test/data/input/unlabelledBNodes.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/unlabelledBNodes.ttl rename to rulewerk-rdf/src/test/data/input/unlabelledBNodes.ttl diff --git 
a/vlog4j-rdf/src/test/data/input/unnormalizedLiteralValues.ttl b/rulewerk-rdf/src/test/data/input/unnormalizedLiteralValues.ttl similarity index 100% rename from vlog4j-rdf/src/test/data/input/unnormalizedLiteralValues.ttl rename to rulewerk-rdf/src/test/data/input/unnormalizedLiteralValues.ttl diff --git a/vlog4j-rdf/src/test/data/output/.keep b/rulewerk-rdf/src/test/data/output/.keep similarity index 100% rename from vlog4j-rdf/src/test/data/output/.keep rename to rulewerk-rdf/src/test/data/output/.keep diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/RdfTestUtils.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java similarity index 90% rename from vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/RdfTestUtils.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java index 700c3db4d..ca400540f 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/RdfTestUtils.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -35,11 +35,11 @@ import org.openrdf.rio.RDFParser; import org.openrdf.rio.Rio; import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public final class RdfTestUtils { diff --git 
a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java similarity index 94% rename from vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java index b5ab85281..c0f126e15 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -22,10 +22,10 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.semanticweb.vlog4j.rdf.RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; -import static org.semanticweb.vlog4j.rdf.RdfTestUtils.RDF_FIRST; -import static org.semanticweb.vlog4j.rdf.RdfTestUtils.RDF_NIL; -import static org.semanticweb.vlog4j.rdf.RdfTestUtils.RDF_REST; +import static org.semanticweb.rulewerk.rdf.RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_FIRST; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_NIL; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_REST; import java.io.File; import java.io.IOException; @@ -40,12 +40,12 @@ import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.model.api.NamedNull; -import org.semanticweb.vlog4j.core.model.api.PrefixDeclarationRegistry; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.api.NamedNull; 
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; public class TestConvertRdfFileToFacts { diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java similarity index 85% rename from vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java rename to rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index 65cc79ef9..fc24fcaf5 100644 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -1,4 +1,4 @@ -package org.semanticweb.vlog4j.rdf; +package org.semanticweb.rulewerk.rdf; /*- * #%L @@ -35,16 +35,16 @@ import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Fact; -import org.semanticweb.vlog4j.core.model.api.PositiveLiteral; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.KnowledgeBase; -import org.semanticweb.vlog4j.core.reasoner.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import 
org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; public class TestReasonOverRdfFacts { From b2d825a52c72f2504927d12718c11a7c33c652ca Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 14:51:10 +0100 Subject: [PATCH 0819/1255] Update license headers --- .../rulewerk/client/picocli/ClientUtils.java | 8 +- .../client/picocli/PrintQueryResults.java | 8 +- .../rulewerk/client/picocli/SaveModel.java | 8 +- .../client/picocli/SaveQueryResults.java | 8 +- .../rulewerk/client/picocli/VLog4jClient.java | 8 +- .../picocli/VLog4jClientMaterialize.java | 8 +- .../client/picocli/PrintQueryResultsTest.java | 6 +- .../client/picocli/SaveModelTest.java | 8 +- .../client/picocli/SaveQueryResultsTest.java | 8 +- .../IncompatiblePredicateArityException.java | 4 +- .../PrefixDeclarationException.java | 8 +- .../exceptions/ReasonerStateException.java | 4 +- .../core/exceptions/VLog4jException.java | 8 +- .../exceptions/VLog4jRuntimeException.java | 4 +- .../core/model/api/AbstractConstant.java | 8 +- .../rulewerk/core/model/api/Conjunction.java | 4 +- .../rulewerk/core/model/api/Constant.java | 4 +- .../rulewerk/core/model/api/DataSource.java | 8 +- .../core/model/api/DataSourceDeclaration.java | 4 +- .../core/model/api/DatatypeConstant.java | 8 +- .../rulewerk/core/model/api/Entity.java | 4 +- .../core/model/api/ExistentialVariable.java | 8 +- .../rulewerk/core/model/api/Fact.java | 4 +- .../model/api/LanguageStringConstant.java | 8 +- .../rulewerk/core/model/api/Literal.java | 4 +- .../rulewerk/core/model/api/NamedNull.java | 8 +- .../core/model/api/NegativeLiteral.java | 4 +- 
.../core/model/api/PositiveLiteral.java | 4 +- .../rulewerk/core/model/api/Predicate.java | 8 +- .../model/api/PrefixDeclarationRegistry.java | 8 +- .../rulewerk/core/model/api/QueryResult.java | 86 +-- .../rulewerk/core/model/api/Rule.java | 4 +- .../rulewerk/core/model/api/Statement.java | 4 +- .../core/model/api/StatementVisitor.java | 4 +- .../rulewerk/core/model/api/SyntaxObject.java | 4 +- .../rulewerk/core/model/api/Term.java | 8 +- .../rulewerk/core/model/api/TermType.java | 4 +- .../rulewerk/core/model/api/TermVisitor.java | 4 +- .../rulewerk/core/model/api/Terms.java | 4 +- .../core/model/api/UniversalVariable.java | 8 +- .../rulewerk/core/model/api/Variable.java | 8 +- .../implementation/AbstractConstantImpl.java | 8 +- .../implementation/AbstractLiteralImpl.java | 4 +- .../AbstractPrefixDeclarationRegistry.java | 8 +- .../implementation/AbstractTermImpl.java | 8 +- .../model/implementation/ConjunctionImpl.java | 4 +- .../DataSourceDeclarationImpl.java | 4 +- .../implementation/DatatypeConstantImpl.java | 4 +- .../ExistentialVariableImpl.java | 4 +- .../model/implementation/Expressions.java | 4 +- .../core/model/implementation/FactImpl.java | 4 +- .../LanguageStringConstantImpl.java | 4 +- .../MergingPrefixDeclarationRegistry.java | 8 +- .../model/implementation/NamedNullImpl.java | 8 +- .../implementation/NegativeLiteralImpl.java | 4 +- .../implementation/PositiveLiteralImpl.java | 4 +- .../model/implementation/PredicateImpl.java | 186 +++---- .../implementation/RenamedNamedNull.java | 8 +- .../core/model/implementation/RuleImpl.java | 4 +- .../core/model/implementation/Serializer.java | 8 +- .../implementation/UniversalVariableImpl.java | 8 +- .../core/reasoner/AcyclicityNotion.java | 4 +- .../rulewerk/core/reasoner/Algorithm.java | 22 +- .../rulewerk/core/reasoner/Correctness.java | 4 +- .../core/reasoner/CyclicityResult.java | 4 +- .../rulewerk/core/reasoner/KnowledgeBase.java | 8 +- .../core/reasoner/KnowledgeBaseListener.java | 4 +- 
.../rulewerk/core/reasoner/LogLevel.java | 4 +- .../core/reasoner/QueryAnswerCount.java | 4 +- .../core/reasoner/QueryResultIterator.java | 4 +- .../rulewerk/core/reasoner/Reasoner.java | 8 +- .../rulewerk/core/reasoner/ReasonerState.java | 8 +- .../core/reasoner/RuleRewriteStrategy.java | 64 +-- .../implementation/CsvFileDataSource.java | 8 +- .../EmptyQueryResultIterator.java | 4 +- .../implementation/FileDataSource.java | 8 +- .../implementation/InMemoryDataSource.java | 8 +- .../implementation/ModelToVLogConverter.java | 4 +- .../implementation/QueryAnswerCountImpl.java | 4 +- .../implementation/QueryResultImpl.java | 4 +- .../implementation/RdfFileDataSource.java | 8 +- .../implementation/Skolemization.java | 8 +- .../SparqlQueryResultDataSource.java | 8 +- .../implementation/TermToVLogConverter.java | 266 ++++----- .../implementation/VLogDataSource.java | 4 +- .../implementation/VLogKnowledgeBase.java | 8 +- .../VLogQueryResultIterator.java | 4 +- .../reasoner/implementation/VLogReasoner.java | 8 +- .../implementation/VLogToModelConverter.java | 4 +- .../core/model/ConjunctionImplTest.java | 4 +- .../core/model/DataSourceDeclarationTest.java | 8 +- .../rulewerk/core/model/FactTest.java | 4 +- .../MergingPrefixDeclarationRegistryTest.java | 8 +- .../core/model/NegativeLiteralImplTest.java | 4 +- .../core/model/PositiveLiteralImplTest.java | 4 +- .../core/model/PredicateImplTest.java | 8 +- .../rulewerk/core/model/RuleImplTest.java | 4 +- .../rulewerk/core/model/TermImplTest.java | 8 +- .../core/reasoner/KnowledgeBaseTest.java | 8 +- .../rulewerk/core/reasoner/LoggingTest.java | 4 +- .../core/reasoner/ReasonerTimeoutTest.java | 4 +- .../implementation/AddDataSourceTest.java | 4 +- .../implementation/AnswerQueryTest.java | 4 +- .../implementation/CsvFileDataSourceTest.java | 8 +- .../FileDataSourceTestUtils.java | 8 +- .../GeneratedAnonymousIndividualsTest.java | 4 +- .../ModelToVLogConverterTest.java | 504 +++++++++--------- 
.../implementation/QueryAnswerCountTest.java | 4 +- .../QueryAnsweringCorrectnessTest.java | 4 +- .../implementation/QueryResultImplTest.java | 120 ++--- .../implementation/QueryResultsUtils.java | 8 +- .../implementation/RdfFileDataSourceTest.java | 8 +- .../implementation/SkolemizationTest.java | 8 +- .../SparqlQueryResultDataSourceTest.java | 4 +- .../implementation/VLogReasonerBasics.java | 216 ++++---- .../VLogReasonerCombinedInputs.java | 4 +- .../implementation/VLogReasonerCsvInput.java | 8 +- .../implementation/VLogReasonerCsvOutput.java | 4 +- .../implementation/VLogReasonerNegation.java | 4 +- .../implementation/VLogReasonerRdfInput.java | 8 +- .../VLogReasonerSparqlInput.java | 4 +- .../implementation/VLogReasonerStateTest.java | 4 +- .../VLogReasonerWriteInferencesTest.java | 8 +- .../VLogToModelConverterTest.java | 4 +- .../vlog/ExportQueryResultToCsvFileTest.java | 4 +- .../core/reasoner/vlog/LargeAritiesTest.java | 278 +++++----- .../reasoner/vlog/StratifiedNegationTest.java | 4 +- .../vlog/VLogDataFromCsvFileTest.java | 8 +- .../reasoner/vlog/VLogDataFromMemoryTest.java | 496 ++++++++--------- .../vlog/VLogDataFromRdfFileTest.java | 4 +- .../core/reasoner/vlog/VLogExpressions.java | 4 +- .../reasoner/vlog/VLogQueryResultUtils.java | 4 +- .../core/reasoner/vlog/VLogQueryTest.java | 4 +- .../core/reasoner/vlog/VLogTermNamesTest.java | 4 +- .../examples/CompareWikidataDBpedia.java | 8 +- .../rulewerk/examples/CountingTriangles.java | 6 +- .../rulewerk/examples/DoidExample.java | 4 +- .../rulewerk/examples/ExamplesUtils.java | 4 +- .../InMemoryGraphAnalysisExample.java | 4 +- .../examples/SimpleReasoningExample.java | 4 +- .../examples/core/AddDataFromCsvFile.java | 8 +- .../examples/core/AddDataFromRdfFile.java | 8 +- .../core/AddDataFromSparqlQueryResults.java | 414 +++++++------- .../core/ConfigureReasonerLogging.java | 4 +- .../SkolemVsRestrictedChaseTermination.java | 272 +++++----- .../examples/graal/AddDataFromDlgpFile.java | 4 +- 
.../examples/graal/AddDataFromGraal.java | 4 +- .../examples/graal/DoidExampleGraal.java | 8 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 282 +++++----- .../examples/rdf/AddDataFromRdfModel.java | 368 ++++++------- .../graal/GraalConjunctiveQueryToRule.java | 4 +- .../rulewerk/graal/GraalConvertException.java | 4 +- .../graal/GraalToVLog4JModelConverter.java | 8 +- .../GraalToVLog4JModelConverterTest.java | 4 +- .../owlapi/AbstractClassToRuleConverter.java | 8 +- .../owlapi/ClassToRuleBodyConverter.java | 8 +- .../owlapi/ClassToRuleHeadConverter.java | 8 +- .../owlapi/OwlAxiomToRulesConverter.java | 8 +- .../OwlFeatureNotSupportedException.java | 4 +- .../owlapi/OwlToRulesConversionHelper.java | 4 +- .../rulewerk/owlapi/OwlToRulesConverter.java | 8 +- .../owlapi/OwlAxiomToRulesConverterTest.java | 4 +- .../parser/ConfigurableLiteralHandler.java | 8 +- .../parser/DataSourceDeclarationHandler.java | 4 +- .../parser/DatatypeConstantHandler.java | 8 +- .../parser/DefaultParserConfiguration.java | 8 +- .../rulewerk/parser/DirectiveArgument.java | 8 +- .../rulewerk/parser/DirectiveHandler.java | 8 +- .../LocalPrefixDeclarationRegistry.java | 8 +- .../rulewerk/parser/ParserConfiguration.java | 8 +- .../rulewerk/parser/ParsingException.java | 8 +- .../rulewerk/parser/RuleParser.java | 8 +- .../CsvFileDataSourceDeclarationHandler.java | 8 +- .../RdfFileDataSourceDeclarationHandler.java | 8 +- ...eryResultDataSourceDeclarationHandler.java | 4 +- .../ImportFileDirectiveHandler.java | 8 +- .../ImportFileRelativeDirectiveHandler.java | 8 +- .../parser/javacc/JavaCCParserBase.java | 8 +- .../parser/javacc/SubParserFactory.java | 8 +- .../parser/DirectiveArgumentTest.java | 8 +- .../rulewerk/parser/DirectiveHandlerTest.java | 8 +- .../rulewerk/parser/EntityTest.java | 8 +- .../parser/ParserConfigurationTest.java | 8 +- .../rulewerk/parser/ParserTestUtils.java | 8 +- .../RuleParserConfigurableLiteralTest.java | 8 +- .../parser/RuleParserDataSourceTest.java | 8 +- 
.../parser/RuleParserParseFactTest.java | 8 +- .../rulewerk/parser/RuleParserTest.java | 8 +- .../parser/javacc/JavaCCParserBaseTest.java | 8 +- .../rulewerk/rdf/RdfModelConverter.java | 8 +- .../rulewerk/rdf/RdfValueToTermConverter.java | 8 +- .../rulewerk/rdf/RdfTestUtils.java | 8 +- .../rdf/TestConvertRdfFileToFacts.java | 8 +- .../rulewerk/rdf/TestReasonOverRdfFacts.java | 8 +- 194 files changed, 2337 insertions(+), 2337 deletions(-) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index 4984fba5e..edaf61f89 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index 935dde8fc..4f44b7f4c 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index 1af92ac6a..354fa52f4 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index 5e593b00c..8d43da5fd 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java index 8663d80d4..c2db5d180 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java index 52bc1e777..961c3a8ed 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java index 6cf1df352..f1845c573 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java @@ -4,14 +4,14 @@ * #%L * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 Rulewerk Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java index 7a4b65532..09a780f0c 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java @@ -13,16 +13,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java index 2ef16cb40..608b10438 100644 --- a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java @@ -12,16 +12,16 @@ /*- * #%L - * VLog4j Client + * Rulewerk Client * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index a274e91cf..67f033a81 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index afd7ec5bd..b81bfaffe 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index bd28395dc..813035df3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -1,8 +1,8 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java index 7d848760a..d8c046a19 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java index d0adc72e8..65a52d9c6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java index 9ce72ce46..750434b73 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java index 8251cc869..be4e9f9ee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java index 0a9c431cd..bf04acd88 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java index 83390c1c7..402d80127 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java index 398e7811a..da1837bba 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java index fb75afea7..28fbc87d5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java index 541f0c598..431b90299 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java index 1ca388565..4e7d60d78 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java index ff082b2eb..61a302e32 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java index e10e49fbb..fbd60d57b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java index 663ecf4a8..b278f7722 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index 921efca58..4b1350265 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java index 90caac997..df5c6e6b0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java index 66fa04ad2..6c4598a77 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index 0a0dc5808..e34ec9b24 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index eb1549f4f..1d24f1daa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java index f3d1cc6d7..49c6319b7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java @@ -1,43 +1,43 @@ -package org.semanticweb.rulewerk.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; - -/** - * A Query Result represents a list of terms that match the terms of the asked - * query. The terms can be named individuals (constants) and anonymous - * individuals (blanks). - * - * @author Irina Dragoste - * - */ -public interface QueryResult { - - /** - * Getter for the terms that represent a query answer. - * - * @return the terms that represent a query answer. They can be named - * individuals (constants) and anonymous individuals (blanks). - */ - List getTerms(); - -} +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +/** + * A Query Result represents a list of terms that match the terms of the asked + * query. The terms can be named individuals (constants) and anonymous + * individuals (blanks). 
+ * + * @author Irina Dragoste + * + */ +public interface QueryResult { + + /** + * Getter for the terms that represent a query answer. + * + * @return the terms that represent a query answer. They can be named + * individuals (constants) and anonymous individuals (blanks). + */ + List getTerms(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java index 8f7b4ee33..81a5000b5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java index 69c6f83c8..fc2b4b009 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java index ced6c05dc..5e8f308f6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java index 73dcafc12..31de18cf6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index 0c631d653..c8aabae47 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java index 9453cb25c..490604400 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java index 1dad479be..36d662321 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java index 2baf7355e..a611d0bc8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java index 12bc6ab19..e432c3c20 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java index 69210b5d0..ba0785752 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java index 699a96d41..426c993c6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java @@ -5,16 +5,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index ba4290138..e19864aee 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 2f58af65b..3e9127f74 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java index c8040af01..e6c037d43 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java @@ -5,16 +5,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java index 8f24855d3..24a750694 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java index 1fcb6bd68..db80f1549 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java index 6f42312c0..f81e44681 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java index 953d92c31..2f7f41e5f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index fcb3e01eb..24998c007 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java index 60ee41579..fad53f4bb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java index 9a112ba09..235bfcd31 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index a63f73950..77aa2038b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java index ff41632ae..803629460 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index 740e8af97..554ae0f63 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index 83eb4e4e0..c0aba7096 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 25e9d3c21..6ec346dae 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -1,93 +1,93 @@ -package org.semanticweb.rulewerk.core.model.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.rulewerk.core.model.api.Predicate; - -/** - * Implementation for {@link Predicate}. Supports predicates of arity 1 or - * higher. - * - * @author Irina Dragoste - * - */ -public class PredicateImpl implements Predicate { - - final private String name; - - final private int arity; - - /** - * Constructor for {@link Predicate}s of arity 1 or higher. - * - * @param name a non-blank String (not null, nor empty or whitespace). - * @param arity an int value strictly greater than 0. 
- */ - public PredicateImpl(@NonNull String name, int arity) { - Validate.notBlank(name, "Predicates cannot be named by blank Strings."); - Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); - - this.name = name; - this.arity = arity; - } - - @Override - public String getName() { - return this.name; - } - - @Override - public int getArity() { - return this.arity; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = this.arity; - result = prime * result + this.name.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof Predicate)) { - return false; - } - final Predicate other = (Predicate) obj; - - return this.arity == other.getArity() && this.name.equals(other.getName()); - } - - @Override - public String toString() { - return getSyntacticRepresentation(); - } - -} +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.apache.commons.lang3.Validate; +import org.eclipse.jdt.annotation.NonNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; + +/** + * Implementation for {@link Predicate}. Supports predicates of arity 1 or + * higher. 
+ * + * @author Irina Dragoste + * + */ +public class PredicateImpl implements Predicate { + + final private String name; + + final private int arity; + + /** + * Constructor for {@link Predicate}s of arity 1 or higher. + * + * @param name a non-blank String (not null, nor empty or whitespace). + * @param arity an int value strictly greater than 0. + */ + public PredicateImpl(@NonNull String name, int arity) { + Validate.notBlank(name, "Predicates cannot be named by blank Strings."); + Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); + + this.name = name; + this.arity = arity; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public int getArity() { + return this.arity; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = this.arity; + result = prime * result + this.name.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof Predicate)) { + return false; + } + final Predicate other = (Predicate) obj; + + return this.arity == other.getArity() && this.name.equals(other.getName()); + } + + @Override + public String toString() { + return getSyntacticRepresentation(); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java index d54bf8512..140ac7532 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java @@ -4,16 +4,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the 
Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java index acd038dc0..aae5c7233 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 6ee191e48..9df2cde79 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -9,16 +9,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java index 1211841b4..ce6b40aa6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java @@ -5,16 +5,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java index 36676c4ae..64c828b51 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java index ea3994d5a..d45b9359d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java @@ -1,10 +1,10 @@ -package org.semanticweb.rulewerk.core.reasoner; - -/* +package org.semanticweb.rulewerk.core.reasoner; + +/* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,9 +17,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * #L% - */ - -public enum Algorithm { - SKOLEM_CHASE, RESTRICTED_CHASE -} + * #L% + */ + +public enum Algorithm { + SKOLEM_CHASE, RESTRICTED_CHASE +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java index 1c6d077bd..264616546 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java index 2e289278b..c5898ef05 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 90cc3bb74..5ebe5560c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java index 127504d46..c46fc60cb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java index fa1a54d45..875612e2f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java index 56cf95bcf..c34419579 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java index 99d08f05e..981d1f5b8 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java index 04138ef5a..ad4825fb1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -18,16 +18,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java index bf22ef019..365aec7f7 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java @@ -2,16 +2,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java index 3aecb060a..8df1012a3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java @@ -1,32 +1,32 @@ -package org.semanticweb.rulewerk.core.reasoner; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -public enum RuleRewriteStrategy { - /** - * Rules are not re-written - */ - NONE, - /** - * Rule heads are split into head pieces whenever possible - */ - SPLIT_HEAD_PIECES -} +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +public enum RuleRewriteStrategy { + /** + * Rules are not re-written + */ + NONE, + /** + * Rule heads are split into head pieces whenever possible + */ + SPLIT_HEAD_PIECES +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index fee712e49..83aff537a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java index 9c48bbb10..449a9dbe9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index b0761e238..4d79ae3a2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java index 13a7066a8..c3033c8a6 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index 1f2f943ee..c146179c9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java index 27814ab4e..edd6b44ca 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index 6727fd558..b143f7b4f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index 43e1c44b1..c1274aac2 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index ddde0498a..aaa9d1f2f 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index b105f8d82..2ee6c900b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index b83cc7a12..345bd8aaf 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -1,133 +1,133 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.TermType; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; -import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; -import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; -import org.semanticweb.rulewerk.core.model.api.TermVisitor; -import org.semanticweb.rulewerk.core.model.api.UniversalVariable; - -/** - * A visitor that converts {@link Term}s of different types to corresponding - * internal VLog model {@link karmaresearch.vlog.Term}s. 
- * - * @author Irina Dragoste - * - */ -class TermToVLogConverter implements TermVisitor { - - /** - * Transforms an abstract constant to a {@link karmaresearch.vlog.Term} with the - * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. - */ - @Override - public karmaresearch.vlog.Term visit(AbstractConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); - } - - /** - * Transforms a datatype constant to a {@link karmaresearch.vlog.Term} with the - * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. - */ - @Override - public karmaresearch.vlog.Term visit(DatatypeConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); - } - - /** - * Transforms a language-tagged string constant to a - * {@link karmaresearch.vlog.Term} with the same name and type - * {@link karmaresearch.vlog.Term.TermType#CONSTANT}. - */ - @Override - public karmaresearch.vlog.Term visit(LanguageStringConstant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); - } - - /** - * Converts the given constant to the name of a constant in VLog. - * - * @param constant - * @return VLog constant string - */ - public static String getVLogNameForConstant(Constant constant) { - if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - String vLog4jConstantName = constant.getName(); - if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; - } else { // keep relative IRIs unchanged - return vLog4jConstantName; - } - } else { // datatype literal - return constant.getName(); - } - } - - /** - * Converts the string representation of a constant in VLog4j directly to the - * name of a constant in VLog, without parsing it into a {@link Constant} first. 
- * - * @param vLog4jConstantName - * @return VLog constant string - */ - public static String getVLogNameForConstantName(String vLog4jConstantName) { - if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged - return vLog4jConstantName; - } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; - } else { // keep relative IRIs unchanged - return vLog4jConstantName; - } - } - - /** - * Transforms a universal variable to a {@link karmaresearch.vlog.Term} with the - * same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. - */ - @Override - public karmaresearch.vlog.Term visit(UniversalVariable term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); - } - - /** - * Transforms an existential variable to a {@link karmaresearch.vlog.Term} with - * the same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. - */ - @Override - public karmaresearch.vlog.Term visit(ExistentialVariable term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "!" + term.getName()); - } - - /** - * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name - * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. - */ - @Override - public karmaresearch.vlog.Term visit(NamedNull term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; + +/** + * A visitor that converts {@link Term}s of different types to corresponding + * internal VLog model {@link karmaresearch.vlog.Term}s. + * + * @author Irina Dragoste + * + */ +class TermToVLogConverter implements TermVisitor { + + /** + * Transforms an abstract constant to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(AbstractConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); + } + + /** + * Transforms a datatype constant to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. 
+ */ + @Override + public karmaresearch.vlog.Term visit(DatatypeConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + } + + /** + * Transforms a language-tagged string constant to a + * {@link karmaresearch.vlog.Term} with the same name and type + * {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(LanguageStringConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + } + + /** + * Converts the given constant to the name of a constant in VLog. + * + * @param constant + * @return VLog constant string + */ + public static String getVLogNameForConstant(Constant constant) { + if (constant.getType() == TermType.ABSTRACT_CONSTANT) { + String vLog4jConstantName = constant.getName(); + if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > + return "<" + vLog4jConstantName + ">"; + } else { // keep relative IRIs unchanged + return vLog4jConstantName; + } + } else { // datatype literal + return constant.getName(); + } + } + + /** + * Converts the string representation of a constant in VLog4j directly to the + * name of a constant in VLog, without parsing it into a {@link Constant} first. + * + * @param vLog4jConstantName + * @return VLog constant string + */ + public static String getVLogNameForConstantName(String vLog4jConstantName) { + if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged + return vLog4jConstantName; + } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > + return "<" + vLog4jConstantName + ">"; + } else { // keep relative IRIs unchanged + return vLog4jConstantName; + } + } + + /** + * Transforms a universal variable to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. 
+ */ + @Override + public karmaresearch.vlog.Term visit(UniversalVariable term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); + } + + /** + * Transforms an existential variable to a {@link karmaresearch.vlog.Term} with + * the same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. + */ + @Override + public karmaresearch.vlog.Term visit(ExistentialVariable term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "!" + term.getName()); + } + + /** + * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name + * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. + */ + @Override + public karmaresearch.vlog.Term visit(NamedNull term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java index d03b1b118..68eb58133 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogDataSource.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java index c66b3094d..a7e7da9aa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogKnowledgeBase.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java index 835fe699a..1db2cc922 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogQueryResultIterator.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index a6b48b5bf..66f88d4bb 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -48,16 +48,16 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java index 3864b4fb7..d90331ccc 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverter.java @@ -2,9 +2,9 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java index 2e6dba525..322bbda3f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/ConjunctionImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 880172977..32ef82333 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java index 70763c3a4..7539d60c9 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/FactTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index c6202e864..d6889aae4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java index c97f71504..34a7e9fc9 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/NegativeLiteralImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java index 265f096c9..d4a93e489 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PositiveLiteralImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java index 7c398b37b..d8274db91 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/PredicateImplTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java index 47c759252..5fc0ee6e6 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/RuleImplTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java index 77f47b7b6..9aa9f7129 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/TermImplTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java index 1305e8acc..b44f89c24 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java index 6b51bfe7a..269cb56cc 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/LoggingTest.java @@ -11,9 +11,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java index 8e64d2915..b8e28c53d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/ReasonerTimeoutTest.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index 5e073a77e..87d84362f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java index be9efb6c2..9de6276f3 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AnswerQueryTest.java @@ -7,9 +7,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index 22cc04bbb..f9b840f1e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index 945c4482a..74a5215fe 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -7,16 +7,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java index 1460a5a7f..92512fdd6 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index a8b773a8d..8ff7491e2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -1,252 +1,252 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.NamedNull; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; - -public class ModelToVLogConverterTest { - - @Test - public void testToVLogTermVariable() { - final Variable variable = Expressions.makeUniversalVariable("var"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.VARIABLE, "var"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(variable); - - assertEquals(expectedVLogTerm, vLogTerm); - } - - @Test - public void testToVLogTermAbstractConstant() { - final Constant constant = Expressions.makeAbstractConstant("const"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, "const"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - assertEquals(expectedVLogTerm.getName(), 
TermToVLogConverter.getVLogNameForConstant(constant)); - } - - @Test - public void testToVLogTermAbstractConstantIri() { - final Constant constant = Expressions.makeAbstractConstant("http://example.org"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, ""); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); - } - - @Test - public void testToVLogTermDatatypeConstant() { - final Constant constant = Expressions.makeDatatypeConstant("c", "http://example.org"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"^^"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); - } - - @Test - public void testToVLogTermLanguageStringConstant() { - final Constant constant = Expressions.makeLanguageStringConstant("c", "en"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"@en"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertEquals(expectedVLogTerm, vLogTerm); - assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); - } - - @Test - public void testToVLogTermBlank() { - final NamedNull blank = new NamedNullImpl("blank"); - final karmaresearch.vlog.Term expectedVLogTerm 
= new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.BLANK, "blank"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(blank); - - assertEquals(expectedVLogTerm, vLogTerm); - } - - @Test - public void testToVLogTermArray() { - final Variable vx = Expressions.makeUniversalVariable("x"); - final Variable vxToo = Expressions.makeUniversalVariable("x"); - final Variable vy = Expressions.makeUniversalVariable("y"); - final Constant cx = Expressions.makeAbstractConstant("x"); - final NamedNull bx = new NamedNullImpl("x"); - final List terms = Arrays.asList(vx, cx, vxToo, bx, vy); - - final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.VARIABLE, "y"); - final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term( - karmaresearch.vlog.Term.TermType.CONSTANT, "x"); - final karmaresearch.vlog.Term expectedBx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, - "x"); - final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx, - expectedVy }; - - final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); - assertArrayEquals(expectedTermArray, vLogTermArray); - } - - @Test - public void testToVLogTermArrayEmpty() { - final List terms = new ArrayList<>(); - final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); - - assertNotNull(vLogTermArray); - assertTrue(vLogTermArray.length == 0); - } - - @Test - public void testToVLogFactTuples() { - final Constant c1 = Expressions.makeAbstractConstant("1"); - final Constant c2 = Expressions.makeAbstractConstant("2"); - final Constant c3 = Expressions.makeAbstractConstant("3"); - final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(c1)); - final Fact atom2 = 
Expressions.makeFact("p2", Arrays.asList(c2, c3)); - - final String[][] vLogTuples = ModelToVLogConverter.toVLogFactTuples(Arrays.asList(atom1, atom2)); - - final String[][] expectedTuples = { { "1" }, { "2", "3" } }; - assertArrayEquals(expectedTuples, vLogTuples); - } - - @Test - public void testToVLogPredicate() { - final Predicate predicate = Expressions.makePredicate("pred", 1); - final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); - assertEquals("pred-1", vLogPredicate); - } - - @Test - public void testToVLogAtom() { - final Constant c = Expressions.makeAbstractConstant("c"); - final Variable x = Expressions.makeUniversalVariable("x"); - final NamedNull b = new NamedNullImpl("_:b"); - final PositiveLiteral atom = Expressions.makePositiveLiteral("pred", c, x, b); - - final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, - "c"); - final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "x"); - final karmaresearch.vlog.Term expectedB = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, - "_:b"); - - final String expectedPredicateName = "pred" + ModelToVLogConverter.PREDICATE_ARITY_SUFFIX_SEPARATOR + 3; - final karmaresearch.vlog.Term[] expectedTerms = { expectedC, expectedX, expectedB }; - final karmaresearch.vlog.Atom expectedAtom = new karmaresearch.vlog.Atom(expectedPredicateName, expectedTerms); - - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(atom); - assertEquals(expectedAtom, vLogAtom); - } - - @Test - public void testToVLogRuleArray() { - final Variable x = Expressions.makeUniversalVariable("x"); - final Variable y = Expressions.makeUniversalVariable("y"); - final Variable z = Expressions.makeUniversalVariable("z"); - final Variable w = Expressions.makeUniversalVariable("w"); - final Variable v = Expressions.makeExistentialVariable("v"); - final PositiveLiteral atomP1X 
= Expressions.makePositiveLiteral("p1", x); - final PositiveLiteral atomP2XY = Expressions.makePositiveLiteral("p2", x, y); - final PositiveLiteral atomP3YZ = Expressions.makePositiveLiteral("p3", y, z); - final Rule rule1 = Expressions.makeRule(atomP1X, atomP2XY, atomP3YZ); - final PositiveLiteral atomQXYZ = Expressions.makePositiveLiteral("q", x, y, z); - final PositiveLiteral atomQYW = Expressions.makePositiveLiteral("q", y, w); - final PositiveLiteral atomQ1XWZ = Expressions.makePositiveLiteral("q1", x, w, z); - final PositiveLiteral atomQ2XV = Expressions.makePositiveLiteral("q2", x, v); - final Rule rule2 = Expressions.makeRule(atomQ2XV, atomQ1XWZ, atomQYW, atomQXYZ); - - final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "x"); - final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "y"); - final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "z"); - final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "w"); - final karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, - "!v"); - final karmaresearch.vlog.Atom expAtomP1X = new karmaresearch.vlog.Atom("p1-1", expX); - final karmaresearch.vlog.Atom expAtomP2XY = new karmaresearch.vlog.Atom("p2-2", expX, expY); - final karmaresearch.vlog.Atom expAtomP3YZ = new karmaresearch.vlog.Atom("p3-2", expY, expZ); - final karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule( - new karmaresearch.vlog.Atom[] { expAtomP1X }, - new karmaresearch.vlog.Atom[] { expAtomP2XY, expAtomP3YZ }); - final karmaresearch.vlog.Atom expAtomQXYZ = new karmaresearch.vlog.Atom("q-3", expX, expY, expZ); - final karmaresearch.vlog.Atom expAtomQYW = new karmaresearch.vlog.Atom("q-2", expY, expW); - final karmaresearch.vlog.Atom expAtomQ1XWZ = new 
karmaresearch.vlog.Atom("q1-3", expX, expW, expZ); - final karmaresearch.vlog.Atom expAtomQ2XV = new karmaresearch.vlog.Atom("q2-2", expX, expV); - final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule( - new karmaresearch.vlog.Atom[] { expAtomQ2XV }, - new karmaresearch.vlog.Atom[] { expAtomQ1XWZ, expAtomQYW, expAtomQXYZ }); - - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter - .toVLogRuleArray(Arrays.asList(rule1, rule2)); - final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, - expectedRule2 }; - assertArrayEquals(expectedRuleArray, vLogRuleArray); - } - - @Test - public void testVLogRuleRewritingStrategy() { - assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, - ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); - assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.AGGRESSIVE, - ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES)); - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; + +public class ModelToVLogConverterTest { + + @Test + public void testToVLogTermVariable() { + final Variable variable = Expressions.makeUniversalVariable("var"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "var"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(variable); + + assertEquals(expectedVLogTerm, vLogTerm); + } + + @Test + public void testToVLogTermAbstractConstant() { + final Constant constant = Expressions.makeAbstractConstant("const"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "const"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), 
TermToVLogConverter.getVLogNameForConstant(constant)); + } + + @Test + public void testToVLogTermAbstractConstantIri() { + final Constant constant = Expressions.makeAbstractConstant("http://example.org"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, ""); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); + } + + @Test + public void testToVLogTermDatatypeConstant() { + final Constant constant = Expressions.makeDatatypeConstant("c", "http://example.org"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"^^"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant)); + } + + @Test + public void testToVLogTermLanguageStringConstant() { + final Constant constant = Expressions.makeLanguageStringConstant("c", "en"); + final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"@en"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); + + assertEquals(expectedVLogTerm, vLogTerm); + assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName())); + } + + @Test + public void testToVLogTermBlank() { + final NamedNull blank = new NamedNullImpl("blank"); + final karmaresearch.vlog.Term expectedVLogTerm 
= new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.BLANK, "blank"); + + final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(blank); + + assertEquals(expectedVLogTerm, vLogTerm); + } + + @Test + public void testToVLogTermArray() { + final Variable vx = Expressions.makeUniversalVariable("x"); + final Variable vxToo = Expressions.makeUniversalVariable("x"); + final Variable vy = Expressions.makeUniversalVariable("y"); + final Constant cx = Expressions.makeAbstractConstant("x"); + final NamedNull bx = new NamedNullImpl("x"); + final List terms = Arrays.asList(vx, cx, vxToo, bx, vy); + + final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "x"); + final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.VARIABLE, "y"); + final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term( + karmaresearch.vlog.Term.TermType.CONSTANT, "x"); + final karmaresearch.vlog.Term expectedBx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "x"); + final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx, + expectedVy }; + + final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); + assertArrayEquals(expectedTermArray, vLogTermArray); + } + + @Test + public void testToVLogTermArrayEmpty() { + final List terms = new ArrayList<>(); + final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); + + assertNotNull(vLogTermArray); + assertTrue(vLogTermArray.length == 0); + } + + @Test + public void testToVLogFactTuples() { + final Constant c1 = Expressions.makeAbstractConstant("1"); + final Constant c2 = Expressions.makeAbstractConstant("2"); + final Constant c3 = Expressions.makeAbstractConstant("3"); + final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(c1)); + final Fact atom2 = 
Expressions.makeFact("p2", Arrays.asList(c2, c3)); + + final String[][] vLogTuples = ModelToVLogConverter.toVLogFactTuples(Arrays.asList(atom1, atom2)); + + final String[][] expectedTuples = { { "1" }, { "2", "3" } }; + assertArrayEquals(expectedTuples, vLogTuples); + } + + @Test + public void testToVLogPredicate() { + final Predicate predicate = Expressions.makePredicate("pred", 1); + final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); + assertEquals("pred-1", vLogPredicate); + } + + @Test + public void testToVLogAtom() { + final Constant c = Expressions.makeAbstractConstant("c"); + final Variable x = Expressions.makeUniversalVariable("x"); + final NamedNull b = new NamedNullImpl("_:b"); + final PositiveLiteral atom = Expressions.makePositiveLiteral("pred", c, x, b); + + final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "c"); + final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "x"); + final karmaresearch.vlog.Term expectedB = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "_:b"); + + final String expectedPredicateName = "pred" + ModelToVLogConverter.PREDICATE_ARITY_SUFFIX_SEPARATOR + 3; + final karmaresearch.vlog.Term[] expectedTerms = { expectedC, expectedX, expectedB }; + final karmaresearch.vlog.Atom expectedAtom = new karmaresearch.vlog.Atom(expectedPredicateName, expectedTerms); + + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(atom); + assertEquals(expectedAtom, vLogAtom); + } + + @Test + public void testToVLogRuleArray() { + final Variable x = Expressions.makeUniversalVariable("x"); + final Variable y = Expressions.makeUniversalVariable("y"); + final Variable z = Expressions.makeUniversalVariable("z"); + final Variable w = Expressions.makeUniversalVariable("w"); + final Variable v = Expressions.makeExistentialVariable("v"); + final PositiveLiteral atomP1X 
= Expressions.makePositiveLiteral("p1", x); + final PositiveLiteral atomP2XY = Expressions.makePositiveLiteral("p2", x, y); + final PositiveLiteral atomP3YZ = Expressions.makePositiveLiteral("p3", y, z); + final Rule rule1 = Expressions.makeRule(atomP1X, atomP2XY, atomP3YZ); + final PositiveLiteral atomQXYZ = Expressions.makePositiveLiteral("q", x, y, z); + final PositiveLiteral atomQYW = Expressions.makePositiveLiteral("q", y, w); + final PositiveLiteral atomQ1XWZ = Expressions.makePositiveLiteral("q1", x, w, z); + final PositiveLiteral atomQ2XV = Expressions.makePositiveLiteral("q2", x, v); + final Rule rule2 = Expressions.makeRule(atomQ2XV, atomQ1XWZ, atomQYW, atomQXYZ); + + final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "x"); + final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "y"); + final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "z"); + final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "w"); + final karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "!v"); + final karmaresearch.vlog.Atom expAtomP1X = new karmaresearch.vlog.Atom("p1-1", expX); + final karmaresearch.vlog.Atom expAtomP2XY = new karmaresearch.vlog.Atom("p2-2", expX, expY); + final karmaresearch.vlog.Atom expAtomP3YZ = new karmaresearch.vlog.Atom("p3-2", expY, expZ); + final karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomP1X }, + new karmaresearch.vlog.Atom[] { expAtomP2XY, expAtomP3YZ }); + final karmaresearch.vlog.Atom expAtomQXYZ = new karmaresearch.vlog.Atom("q-3", expX, expY, expZ); + final karmaresearch.vlog.Atom expAtomQYW = new karmaresearch.vlog.Atom("q-2", expY, expW); + final karmaresearch.vlog.Atom expAtomQ1XWZ = new 
karmaresearch.vlog.Atom("q1-3", expX, expW, expZ); + final karmaresearch.vlog.Atom expAtomQ2XV = new karmaresearch.vlog.Atom("q2-2", expX, expV); + final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomQ2XV }, + new karmaresearch.vlog.Atom[] { expAtomQ1XWZ, expAtomQYW, expAtomQXYZ }); + + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter + .toVLogRuleArray(Arrays.asList(rule1, rule2)); + final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, + expectedRule2 }; + assertArrayEquals(expectedRuleArray, vLogRuleArray); + } + + @Test + public void testVLogRuleRewritingStrategy() { + assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, + ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); + assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.AGGRESSIVE, + ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES)); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java index 9c5a993e8..2e7e0c29f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java index af30f5a3f..ffbada38f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnsweringCorrectnessTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java index 0335dd845..724a41064 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java @@ -1,60 +1,60 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; - -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.QueryResult; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; - -public class QueryResultImplTest { - - @Test - public void testEquals() { - final Constant c1 = Expressions.makeAbstractConstant("C"); - final Constant c2 = Expressions.makeAbstractConstant("ddd"); - final List constantList = Arrays.asList(c1, c1, c2); - - final QueryResult queryResult1 = new QueryResultImpl(constantList); - final QueryResult queryResult2 = new QueryResultImpl(Arrays.asList(c1, c1, c2)); - final QueryResult queryResult3 = new QueryResultImpl(Arrays.asList(c1, c2, c1)); - - assertEquals(queryResult1, queryResult1); - assertEquals(queryResult2, queryResult1); - assertEquals(queryResult2.hashCode(), queryResult1.hashCode()); - assertNotEquals(queryResult3, queryResult1); - assertNotEquals(queryResult3.hashCode(), queryResult1.hashCode()); - assertNotEquals(new QueryResultImpl(null), queryResult1); - assertEquals(new QueryResultImpl(null), new QueryResultImpl(null)); - assertFalse(queryResult1.equals(null)); - assertFalse(queryResult1.equals(constantList)); - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; + +public class QueryResultImplTest { + + @Test + public void testEquals() { + final Constant c1 = Expressions.makeAbstractConstant("C"); + final Constant c2 = Expressions.makeAbstractConstant("ddd"); + final List constantList = Arrays.asList(c1, c1, c2); + + final QueryResult queryResult1 = new QueryResultImpl(constantList); + final QueryResult queryResult2 = new QueryResultImpl(Arrays.asList(c1, c1, c2)); + final QueryResult queryResult3 = new QueryResultImpl(Arrays.asList(c1, c2, c1)); + + assertEquals(queryResult1, queryResult1); + assertEquals(queryResult2, queryResult1); + assertEquals(queryResult2.hashCode(), queryResult1.hashCode()); + assertNotEquals(queryResult3, queryResult1); + assertNotEquals(queryResult3.hashCode(), queryResult1.hashCode()); + assertNotEquals(new QueryResultImpl(null), queryResult1); + assertEquals(new QueryResultImpl(null), new QueryResultImpl(null)); + assertFalse(queryResult1.equals(null)); + assertFalse(queryResult1.equals(constantList)); + } + +} diff --git 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java index 6bfbdf501..81ee7716e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultsUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index ba4730f83..c5baf8bde 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -4,16 +4,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java index da644f50d..51dc0fe67 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java index fdc5ba789..9e9806113 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java index d27cb9282..0c5ecf2e2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerBasics.java @@ -1,108 +1,108 @@ -package org.semanticweb.rulewerk.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.junit.Test; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; - -public class VLogReasonerBasics { - - final String constantNameC = "c"; - final String constantNameD = "d"; - - final Constant constantC = Expressions.makeAbstractConstant(constantNameC); - final Constant constantD = Expressions.makeAbstractConstant(constantNameD); - final Variable x = Expressions.makeUniversalVariable("x"); - final Fact factAc = Expressions.makeFact("A", Arrays.asList(constantC)); - final Fact factAd = Expressions.makeFact("A", Arrays.asList(constantD)); - final PositiveLiteral atomAx = Expressions.makePositiveLiteral("A", x); - final PositiveLiteral atomBx = Expressions.makePositiveLiteral("B", x); - final PositiveLiteral atomCx = Expressions.makePositiveLiteral("C", x); - final Rule ruleBxAx = Expressions.makeRule(atomBx, atomAx); - final Rule ruleCxBx = 
Expressions.makeRule(atomCx, atomBx); - - @Test(expected = NullPointerException.class) - public void testSetAlgorithmNull() { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.setAlgorithm(null); - } - } - - @Test(expected = NullPointerException.class) - public void setRuleRewriteStrategy1() { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.setRuleRewriteStrategy(null); - } - } - - @Test - public void testLoadRules() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - assertEquals(Arrays.asList(ruleBxAx, ruleCxBx), kb.getRules()); - } - } - - @Test - public void testSimpleInference() throws IOException { - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd); - - try (final VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.load(); - - final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true); - assertFalse(cxQueryResultEnumBeforeReasoning.hasNext()); - - reasoner.reason(); - - final QueryResultIterator cxQueryResultEnumAfterReasoning = reasoner.answerQuery(atomCx, true); - final Set> actualResults = QueryResultsUtils - .collectQueryResults(cxQueryResultEnumAfterReasoning); - - final Set> expectedResults = new HashSet<>( - Arrays.asList(Arrays.asList(constantC), Arrays.asList(constantD))); - - assertEquals(expectedResults, actualResults); - } - } - -} +package org.semanticweb.rulewerk.core.reasoner.implementation; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; + +public class VLogReasonerBasics { + + final String constantNameC = "c"; + final String constantNameD = "d"; + + final Constant constantC = Expressions.makeAbstractConstant(constantNameC); + final Constant constantD = Expressions.makeAbstractConstant(constantNameD); + final Variable x = Expressions.makeUniversalVariable("x"); + final Fact factAc = Expressions.makeFact("A", Arrays.asList(constantC)); + final Fact factAd = Expressions.makeFact("A", Arrays.asList(constantD)); + final PositiveLiteral atomAx = Expressions.makePositiveLiteral("A", x); + final PositiveLiteral atomBx = Expressions.makePositiveLiteral("B", x); + final PositiveLiteral atomCx = Expressions.makePositiveLiteral("C", x); + final Rule ruleBxAx = Expressions.makeRule(atomBx, atomAx); + final Rule ruleCxBx = 
Expressions.makeRule(atomCx, atomBx); + + @Test(expected = NullPointerException.class) + public void testSetAlgorithmNull() { + try (final Reasoner reasoner = Reasoner.getInstance();) { + reasoner.setAlgorithm(null); + } + } + + @Test(expected = NullPointerException.class) + public void setRuleRewriteStrategy1() { + try (final Reasoner reasoner = Reasoner.getInstance();) { + reasoner.setRuleRewriteStrategy(null); + } + } + + @Test + public void testLoadRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + assertEquals(Arrays.asList(ruleBxAx, ruleCxBx), kb.getRules()); + } + } + + @Test + public void testSimpleInference() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true); + assertFalse(cxQueryResultEnumBeforeReasoning.hasNext()); + + reasoner.reason(); + + final QueryResultIterator cxQueryResultEnumAfterReasoning = reasoner.answerQuery(atomCx, true); + final Set> actualResults = QueryResultsUtils + .collectQueryResults(cxQueryResultEnumAfterReasoning); + + final Set> expectedResults = new HashSet<>( + Arrays.asList(Arrays.asList(constantC), Arrays.asList(constantD))); + + assertEquals(expectedResults, actualResults); + } + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java index dae941489..6c305e40d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -6,9 +6,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java index a7d1f066a..bbdc37316 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java index 3c4e023f3..7585f47bf 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvOutput.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java index 213889d55..ecbc3ab07 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java index 601e6f57b..0d76dc569 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java index f5cb44aab..b95c1b005 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -5,9 +5,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java index a524ec507..8f85f806e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index dcaf16a5a..24e747cdb 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -34,16 +34,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java index db398d51d..0ee0ddcce 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java index 8c1f38594..89ad3228d 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/ExportQueryResultToCsvFileTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java index 5d9f00983..1ee33c9eb 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/LargeAritiesTest.java @@ -1,139 +1,139 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; - -import static org.junit.Assert.assertArrayEquals; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Test; - -import karmaresearch.vlog.Atom; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.Rule; -import karmaresearch.vlog.Term; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.RuleRewriteStrategy; - -/** - * Tests that reasoning and querying with predicates of large arities is - * allowed. 
- * - * @author Irina Dragoste - * - */ -public class LargeAritiesTest { - - final static int PREDICATE_ARITY_LIMIT = 255; - final static int VARIABLES_PER_RULE_LIMIT = 255; - - @Test - public void testLargeNumberOfVariablesPerRule() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT); - } - - @Test(expected = IllegalArgumentException.class) - public void testNumberOfVariablesPerRuleExceedsLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT + 1); - } - - @Test - public void testLargePredicateArities() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testPredicateArity(PREDICATE_ARITY_LIMIT); - } - - @Test(expected = IllegalArgumentException.class) - public void testPredicateAritiesExceedLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { - testPredicateArity(PREDICATE_ARITY_LIMIT + 1); - } - - private void testNumberOfVariablesPerRule(int variablesPerRuleLimit) - throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { - final VLog vLog = new VLog(); - - final String[][] pFactArguments = { { "c" } }; - - final List body = new ArrayList<>(); - for (int i = 1; i <= variablesPerRuleLimit; i++) { - final String predicateName = "P" + i; - // Pi(xi) - body.add(VLogExpressions.makeAtom(predicateName, VLogExpressions.makeVariable("x" + i))); - // Pi(c) - vLog.addData(predicateName, pFactArguments); - } - final Atom head = VLogExpressions.makeAtom("q", VLogExpressions.makeVariable("x1")); - - // q(x1) :- P1(x1),...,Pn(xn) - final Rule rule = VLogExpressions.makeRule(head, body.toArray(new Atom[body.size()])); - - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - try (final TermQueryResultIterator queryResultIterator = vLog.query(head, 
true, false)) { - assertTrue(queryResultIterator.hasNext()); - final Term[] queryResult = queryResultIterator.next(); - assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, queryResult); - - assertFalse(queryResultIterator.hasNext()); - } - vLog.stop(); - } - - private void testPredicateArity(final int predicateArityLimit) - throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { - final List constants = new ArrayList<>(); - for (int i = 0; i < predicateArityLimit; i++) { - constants.add("c" + i); - } - final String[][] pFactArguments = { constants.toArray(new String[predicateArityLimit]) }; - - final List variables = new ArrayList<>(); - for (int i = 0; i < predicateArityLimit; i++) { - variables.add(VLogExpressions.makeVariable("x" + i)); - } - - final Term[] terms = variables.toArray(new Term[variables.size()]); - final Rule rule = VLogExpressions.makeRule(VLogExpressions.makeAtom("q", terms), - VLogExpressions.makeAtom("p", terms)); - final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", terms); - - final VLog vLog = new VLog(); - vLog.addData("p", pFactArguments); - - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - try (final TermQueryResultIterator queryResultIterator = vLog.query(queryAtomQPredicate, true, false)) { - assertTrue(queryResultIterator.hasNext()); - final Term[] queryResult = queryResultIterator.next(); - assertTrue(queryResult.length == predicateArityLimit); - - assertFalse(queryResultIterator.hasNext()); - } - vLog.stop(); - } - -} +package org.semanticweb.rulewerk.core.reasoner.vlog; + +import static org.junit.Assert.assertArrayEquals; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.Test; + +import karmaresearch.vlog.Atom; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.Rule; +import karmaresearch.vlog.Term; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.RuleRewriteStrategy; + +/** + * Tests that reasoning and querying with predicates of large arities is + * allowed. 
+ * + * @author Irina Dragoste + * + */ +public class LargeAritiesTest { + + final static int PREDICATE_ARITY_LIMIT = 255; + final static int VARIABLES_PER_RULE_LIMIT = 255; + + @Test + public void testLargeNumberOfVariablesPerRule() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT); + } + + @Test(expected = IllegalArgumentException.class) + public void testNumberOfVariablesPerRuleExceedsLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT + 1); + } + + @Test + public void testLargePredicateArities() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testPredicateArity(PREDICATE_ARITY_LIMIT); + } + + @Test(expected = IllegalArgumentException.class) + public void testPredicateAritiesExceedLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException { + testPredicateArity(PREDICATE_ARITY_LIMIT + 1); + } + + private void testNumberOfVariablesPerRule(int variablesPerRuleLimit) + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { + final VLog vLog = new VLog(); + + final String[][] pFactArguments = { { "c" } }; + + final List body = new ArrayList<>(); + for (int i = 1; i <= variablesPerRuleLimit; i++) { + final String predicateName = "P" + i; + // Pi(xi) + body.add(VLogExpressions.makeAtom(predicateName, VLogExpressions.makeVariable("x" + i))); + // Pi(c) + vLog.addData(predicateName, pFactArguments); + } + final Atom head = VLogExpressions.makeAtom("q", VLogExpressions.makeVariable("x1")); + + // q(x1) :- P1(x1),...,Pn(xn) + final Rule rule = VLogExpressions.makeRule(head, body.toArray(new Atom[body.size()])); + + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + try (final TermQueryResultIterator queryResultIterator = vLog.query(head, 
true, false)) { + assertTrue(queryResultIterator.hasNext()); + final Term[] queryResult = queryResultIterator.next(); + assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, queryResult); + + assertFalse(queryResultIterator.hasNext()); + } + vLog.stop(); + } + + private void testPredicateArity(final int predicateArityLimit) + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { + final List constants = new ArrayList<>(); + for (int i = 0; i < predicateArityLimit; i++) { + constants.add("c" + i); + } + final String[][] pFactArguments = { constants.toArray(new String[predicateArityLimit]) }; + + final List variables = new ArrayList<>(); + for (int i = 0; i < predicateArityLimit; i++) { + variables.add(VLogExpressions.makeVariable("x" + i)); + } + + final Term[] terms = variables.toArray(new Term[variables.size()]); + final Rule rule = VLogExpressions.makeRule(VLogExpressions.makeAtom("q", terms), + VLogExpressions.makeAtom("p", terms)); + final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", terms); + + final VLog vLog = new VLog(); + vLog.addData("p", pFactArguments); + + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + try (final TermQueryResultIterator queryResultIterator = vLog.query(queryAtomQPredicate, true, false)) { + assertTrue(queryResultIterator.hasNext()); + final Term[] queryResult = queryResultIterator.next(); + assertTrue(queryResult.length == predicateArityLimit); + + assertFalse(queryResultIterator.hasNext()); + } + vLog.stop(); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java index b4b89ae14..ee9041fda 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/StratifiedNegationTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java index 676dd79dd..afc81d080 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java index 1d9b7ca0d..fb2882349 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromMemoryTest.java @@ -1,248 +1,248 @@ -package org.semanticweb.rulewerk.core.reasoner.vlog; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.commons.lang3.StringUtils; -import org.junit.Assert; -import org.junit.Test; - -import karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.NonExistingPredicateException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.Rule; -import karmaresearch.vlog.Term; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; -import karmaresearch.vlog.VLog.RuleRewriteStrategy; - -/** - * Tests VLog functionality when data (facts) is loaded exclusively from memory. - * - * @author Irina.Dragoste - * - */ -public class VLogDataFromMemoryTest { - - @Test - public void testVLogSimpleInference() throws AlreadyStartedException, EDBConfigurationException, IOException, - NotStartedException, NonExistingPredicateException { - - final String[][] argsAMatrix = { { "a" }, { "b" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); - // tuples: [[a], [b]] - final Set> tuples = new HashSet<>(); - tuples.add(Arrays.asList(VLogExpressions.makeConstant("a"))); - tuples.add(Arrays.asList(VLogExpressions.makeConstant("b"))); - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); // Assert A(a), A(b) - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - - // Querying A(?X) before materialize - final TermQueryResultIterator queryResultIteratorAx1 = vLog.query(atomAx); - final Set> 
queryAxResults1 = VLogQueryResultUtils.collectResults(queryResultIteratorAx1); - assertEquals(tuples, queryAxResults1); - - // Querying B(?X) before materialize - final TermQueryResultIterator queryResultIteratorBx1 = vLog.query(atomBx); - assertFalse(queryResultIteratorBx1.hasNext()); - queryResultIteratorBx1.close(); - - vLog.materialize(true); - - // Querying B(?X) after materialize - final TermQueryResultIterator queryResultIteratorBx2 = vLog.query(atomBx); - final Set> queryResultsBx = VLogQueryResultUtils.collectResults(queryResultIteratorBx2); - assertEquals(tuples, queryResultsBx); - - final TermQueryResultIterator queryResultIteratorAx2 = vLog.query(atomAx); - final Set> queryAxResults2 = VLogQueryResultUtils.collectResults(queryResultIteratorAx2); - assertEquals(tuples, queryAxResults2); - - vLog.stop(); - } - - @Test - public void testBooleanQueryTrueIncludeConstantsFalse() throws AlreadyStartedException, EDBConfigurationException, - IOException, NotStartedException, NonExistingPredicateException { - // Creating rules and facts - final String[][] argsAMatrix = { { "a", "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); - final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); - - final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); - 
assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); - final Term[] expectedQueryResult = { constantA, constantA }; - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); - assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); - defaultIteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorNoConstantsNoBlanks = vLog.query(booleanQueryAtomBa, false, false); - assertTrue(iteratorNoConstantsNoBlanks.hasNext()); - assertTrue(iteratorNoConstantsNoBlanks.next().length == 0); - iteratorNoConstantsNoBlanks.close(); - - final TermQueryResultIterator iteratorNoConstantsWithBlanks = vLog.query(booleanQueryAtomBa, false, true); - assertTrue(iteratorNoConstantsWithBlanks.hasNext()); - Assert.assertTrue(iteratorNoConstantsWithBlanks.next().length == 0); - assertFalse(iteratorNoConstantsWithBlanks.hasNext()); - iteratorNoConstantsWithBlanks.close(); - - vLog.stop(); - } - - @Test - public void testBooleanQueryTrueIncludeConstantsTrue() throws AlreadyStartedException, EDBConfigurationException, - IOException, NotStartedException, NonExistingPredicateException { - // Creating rules and facts - final String[][] argsAMatrix = { { "a", "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); // A(x,x) -> B(x,x) - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); // assert A(a,a) - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); - final 
karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); - - final Term[] expectedQueryResult = { constantA, constantA }; - - final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); - assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); - assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); - defaultIteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa, true, false); - assertTrue(iteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult3 = iteratorWithConstantsAndBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult3); - assertFalse(iteratorWithConstantsAndBlanks.hasNext()); - iteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorWithConstantsNoBlanks = vLog.query(booleanQueryAtomBa, true, true); - assertTrue(iteratorWithConstantsNoBlanks.hasNext()); - final Term[] actualQueryResult2 = iteratorWithConstantsNoBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult2); - assertFalse(iteratorWithConstantsNoBlanks.hasNext()); - iteratorWithConstantsNoBlanks.close(); - - vLog.stop(); - } - - @Test - public void testBooleanQueryFalse() throws AlreadyStartedException, EDBConfigurationException, IOException, - NotStartedException, NonExistingPredicateException { - final String[][] argsAMatrix = { { "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); - - // Start VLog - final VLog vLog = new 
VLog(); - vLog.addData("A", argsAMatrix); - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantB = VLogExpressions.makeConstant("b"); - final karmaresearch.vlog.Atom booleanQueryAtomBb = new karmaresearch.vlog.Atom("B", constantB); - - final TermQueryResultIterator queryResultEnnumeration = vLog.query(booleanQueryAtomBb); - assertFalse(queryResultEnnumeration.hasNext()); - - queryResultEnnumeration.close(); - vLog.stop(); - } - - @Test(expected = NonExistingPredicateException.class) - public void queryEmptyKnowledgeBaseBeforeReasoning() throws NotStartedException, AlreadyStartedException, - EDBConfigurationException, IOException, NonExistingPredicateException { - // Start VLog - final VLog vLog = new VLog(); - try { - vLog.start(StringUtils.EMPTY, false); - - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", - VLogExpressions.makeVariable("?x")); - - vLog.query(queryAtom); - } finally { - vLog.stop(); - } - } - - @Test(expected = NonExistingPredicateException.class) - public void queryEmptyKnowledgeBaseAfterReasoning() throws NotStartedException, AlreadyStartedException, - EDBConfigurationException, IOException, NonExistingPredicateException { - // Start VLog - final VLog vLog = new VLog(); - try { - vLog.start(StringUtils.EMPTY, false); - vLog.materialize(true); - - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", - VLogExpressions.makeVariable("?x")); - - vLog.query(queryAtom); - } finally { - vLog.stop(); - } - } - -} \ No newline at end of file +package org.semanticweb.rulewerk.core.reasoner.vlog; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.commons.lang3.StringUtils; +import org.junit.Assert; +import org.junit.Test; + +import karmaresearch.vlog.AlreadyStartedException; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.Rule; +import karmaresearch.vlog.Term; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.RuleRewriteStrategy; + +/** + * Tests VLog functionality when data (facts) is loaded exclusively from memory. 
+ * + * @author Irina.Dragoste + * + */ +public class VLogDataFromMemoryTest { + + @Test + public void testVLogSimpleInference() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { + + final String[][] argsAMatrix = { { "a" }, { "b" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); + // tuples: [[a], [b]] + final Set> tuples = new HashSet<>(); + tuples.add(Arrays.asList(VLogExpressions.makeConstant("a"))); + tuples.add(Arrays.asList(VLogExpressions.makeConstant("b"))); + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); // Assert A(a), A(b) + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + + // Querying A(?X) before materialize + final TermQueryResultIterator queryResultIteratorAx1 = vLog.query(atomAx); + final Set> queryAxResults1 = VLogQueryResultUtils.collectResults(queryResultIteratorAx1); + assertEquals(tuples, queryAxResults1); + + // Querying B(?X) before materialize + final TermQueryResultIterator queryResultIteratorBx1 = vLog.query(atomBx); + assertFalse(queryResultIteratorBx1.hasNext()); + queryResultIteratorBx1.close(); + + vLog.materialize(true); + + // Querying B(?X) after materialize + final TermQueryResultIterator queryResultIteratorBx2 = vLog.query(atomBx); + final Set> queryResultsBx = VLogQueryResultUtils.collectResults(queryResultIteratorBx2); + assertEquals(tuples, queryResultsBx); + + final TermQueryResultIterator queryResultIteratorAx2 = vLog.query(atomAx); + final Set> queryAxResults2 = VLogQueryResultUtils.collectResults(queryResultIteratorAx2); + assertEquals(tuples, queryAxResults2); + + vLog.stop(); + } + + @Test + public void testBooleanQueryTrueIncludeConstantsFalse() 
throws AlreadyStartedException, EDBConfigurationException, + IOException, NotStartedException, NonExistingPredicateException { + // Creating rules and facts + final String[][] argsAMatrix = { { "a", "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); + final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); + + final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); + assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); + final Term[] expectedQueryResult = { constantA, constantA }; + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); + assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); + defaultIteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorNoConstantsNoBlanks = vLog.query(booleanQueryAtomBa, false, false); + assertTrue(iteratorNoConstantsNoBlanks.hasNext()); + assertTrue(iteratorNoConstantsNoBlanks.next().length == 0); + iteratorNoConstantsNoBlanks.close(); + + final TermQueryResultIterator iteratorNoConstantsWithBlanks = vLog.query(booleanQueryAtomBa, false, true); + assertTrue(iteratorNoConstantsWithBlanks.hasNext()); + Assert.assertTrue(iteratorNoConstantsWithBlanks.next().length == 0); + 
assertFalse(iteratorNoConstantsWithBlanks.hasNext()); + iteratorNoConstantsWithBlanks.close(); + + vLog.stop(); + } + + @Test + public void testBooleanQueryTrueIncludeConstantsTrue() throws AlreadyStartedException, EDBConfigurationException, + IOException, NotStartedException, NonExistingPredicateException { + // Creating rules and facts + final String[][] argsAMatrix = { { "a", "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); // A(x,x) -> B(x,x) + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); // assert A(a,a) + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); + final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); + + final Term[] expectedQueryResult = { constantA, constantA }; + + final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); + assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); + assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); + defaultIteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa, true, false); + assertTrue(iteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult3 = iteratorWithConstantsAndBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, 
actualQueryResult3); + assertFalse(iteratorWithConstantsAndBlanks.hasNext()); + iteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorWithConstantsNoBlanks = vLog.query(booleanQueryAtomBa, true, true); + assertTrue(iteratorWithConstantsNoBlanks.hasNext()); + final Term[] actualQueryResult2 = iteratorWithConstantsNoBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult2); + assertFalse(iteratorWithConstantsNoBlanks.hasNext()); + iteratorWithConstantsNoBlanks.close(); + + vLog.stop(); + } + + @Test + public void testBooleanQueryFalse() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { + final String[][] argsAMatrix = { { "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantB = VLogExpressions.makeConstant("b"); + final karmaresearch.vlog.Atom booleanQueryAtomBb = new karmaresearch.vlog.Atom("B", constantB); + + final TermQueryResultIterator queryResultEnnumeration = vLog.query(booleanQueryAtomBb); + assertFalse(queryResultEnnumeration.hasNext()); + + queryResultEnnumeration.close(); + vLog.stop(); + } + + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseBeforeReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { + // Start VLog + final VLog vLog = new VLog(); + try { + vLog.start(StringUtils.EMPTY, false); + + final 
karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); + + vLog.query(queryAtom); + } finally { + vLog.stop(); + } + } + + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseAfterReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { + // Start VLog + final VLog vLog = new VLog(); + try { + vLog.start(StringUtils.EMPTY, false); + vLog.materialize(true); + + final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); + + vLog.query(queryAtom); + } finally { + vLog.stop(); + } + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java index 622888073..a16b34c89 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java index d03bf4fac..d87a36190 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogExpressions.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java index e61d46421..a23dae441 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryResultUtils.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java index df38a52ab..57ca22e3f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogQueryTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java index 428e31226..ed85ea768 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/vlog/VLogTermNamesTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java index 838a261f5..85f9e8b3b 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java index fede387b0..b44afdbf8 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java @@ -4,9 +4,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -78,4 +78,4 @@ public static void main(final String[] args) throws IOException, ParsingExceptio } -} \ No newline at end of file +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java index 2f895847e..b80163fb4 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index dafb60680..f379970ba 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java index 6ea419e31..0647c1edc 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index 9591dd3f0..89cadd193 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java index 9fcb968a3..0b76c4bb3 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java index 8e974a814..2aa2c02a2 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java index 6f3a92d3f..8eb7a2a8f 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java @@ -1,207 +1,207 @@ -package org.semanticweb.rulewerk.examples.core; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.IOException; -import java.net.URL; -import java.util.Arrays; -import java.util.LinkedHashSet; -import java.util.List; - -import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.DataSource; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.Reasoner; -import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; -import org.semanticweb.rulewerk.examples.ExamplesUtils; - -/** - * This is a simple example of adding data from the result of a SPARQL query on - * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In - * this example, we will query Wikidata for titles of publications that have - * authors who have children together. - * - * @author Irina Dragoste - * - */ -public class AddDataFromSparqlQueryResults { - - /** - * WikiData author - * property id. - */ - private static final String WIKIDATA_AUTHOR_PROPERTY = "wdt:P50"; - /** - * WikiData title - * property id. Published title of a work, such as a newspaper article, a - * literary work, a website, or a performance work - */ - private static final String WIKIDATA_TITLE_PROPERTY = "wdt:P1476"; - /** - * WikiData mother - * property id. - */ - private static final String WIKIDATA_MOTHER_PROPERTY = "wdt:P25"; - /** - * WikiData father - * property id. 
- */ - private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22"; - - public static void main(final String[] args) throws IOException { - - ExamplesUtils.configureLogging(); - - /* - * The WikiData SPARQL query endpoint. - */ - final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); - - /* - * SPARQL query body that looks for publications where two authors of the - * publication are the mother, respectively father of the same child. - */ - final String queryBody = " ?publication " + WIKIDATA_TITLE_PROPERTY + " ?title ." + "?publication " - + WIKIDATA_AUTHOR_PROPERTY + " ?mother ." + " ?publication " + WIKIDATA_AUTHOR_PROPERTY + " ?father ." - + " ?child " + WIKIDATA_MOTHER_PROPERTY + " ?mother ." + " ?child " + WIKIDATA_FATHER_PROPERTY - + " ?father ."; - - final Variable titleVariable = Expressions.makeUniversalVariable("title"); - final Variable motherVariable = Expressions.makeUniversalVariable("mother"); - final Variable fatherVariable = Expressions.makeUniversalVariable("father"); - - /* - * The query variables are the variables from the query body which will appear - * in the query result, in the given order. Fact resulting from this query will - * have as terms the title of the publication, the mother publication author and - * the father publication author. - */ - final LinkedHashSet queryVariables = new LinkedHashSet<>( - Arrays.asList(titleVariable, motherVariable, fatherVariable)); - - /* - * We query Wikidata with the SPARQL query composed of the query variables and - * query body. The query result is a DataSource we will associate to a - * predicate. - */ - final DataSource sparqlQueryResultDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, - queryVariables, queryBody); - - /* - * Predicate that will be mapped to the SPARQL query result. It must have the - * same arity as the query variables size. In this case, we have 3 query - * variables (title, mother and father). 
- */ - final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); - - try (Reasoner reasoner = Reasoner.getInstance()) { - - final KnowledgeBase kb = reasoner.getKnowledgeBase(); - /* - * The SPARQL query results will be added to the reasoner knowledge base, as - * facts associated to the predicate publicationParents. - */ - - kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource)); - reasoner.reason(); - - /* - * We construct a query PositiveLiteral for the predicated associated to the - * SPARQL query result. - */ - final PositiveLiteral query = Expressions.makePositiveLiteral(queryPredicate, Expressions.makeUniversalVariable("x"), - Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); - - /* We query the reasoner for facts of the SPARQL query result predicate. */ - System.out.println("Titles of publications by co-authors who have a child together:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(query, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out.println("- title: " + queryResultTerms.get(0) + ", mother author: " - + queryResultTerms.get(1) + ", father author: " + queryResultTerms.get(2)); - }); - } - - /* - * To do some basic reasoning, we would now like to add the following rule that - * extracts (unique) mothers, fathers, and pairs from the queried data: - * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :- - * publicationParents(?x, ?y, ?z) . 
- */ - final PositiveLiteral haveChildrenTogether = Expressions.makePositiveLiteral("haveChildrenTogether", - Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); - final PositiveLiteral isMother = Expressions.makePositiveLiteral("isMother", Expressions.makeUniversalVariable("y")); - final PositiveLiteral isFather = Expressions.makePositiveLiteral("isFather", Expressions.makeUniversalVariable("z")); - final Conjunction ruleHeadConjunction = Expressions - .makePositiveConjunction(haveChildrenTogether, isMother, isFather); - final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query)); - - /* - * We add the created rule, and reason on the data added from the Wikidata - * SPARQL query result. - */ - kb.addStatement(rule); - reasoner.reason(); - - /* We query the reasoner for facts of the haveChildrenTogether predicate. */ - System.out.println("Co-authors who have a child:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(haveChildrenTogether, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out - .println("- author1: " + queryResultTerms.get(0) + ", author2: " + queryResultTerms.get(1)); - }); - } - - /* We query the reasoner for facts of the isMother predicate. */ - System.out.println("Mothers:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isMother, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out.println("- mother: " + queryResultTerms.get(0)); - }); - } - - /* We query the reasoner for facts of the isFather predicate. 
*/ - System.out.println("Fathers:"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isFather, false)) { - queryResultIterator.forEachRemaining(queryResult -> { - final List queryResultTerms = queryResult.getTerms(); - - System.out.println("- father: " + queryResultTerms.get(0)); - }); - } - - } - } - -} +package org.semanticweb.rulewerk.examples.core; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.net.URL; +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.examples.ExamplesUtils; + +/** + * This is a simple example of adding data from the result of a SPARQL query on + * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In + * this example, we will query Wikidata for titles of publications that have + * authors who have children together. + * + * @author Irina Dragoste + * + */ +public class AddDataFromSparqlQueryResults { + + /** + * WikiData author + * property id. + */ + private static final String WIKIDATA_AUTHOR_PROPERTY = "wdt:P50"; + /** + * WikiData title + * property id. Published title of a work, such as a newspaper article, a + * literary work, a website, or a performance work + */ + private static final String WIKIDATA_TITLE_PROPERTY = "wdt:P1476"; + /** + * WikiData mother + * property id. + */ + private static final String WIKIDATA_MOTHER_PROPERTY = "wdt:P25"; + /** + * WikiData father + * property id. 
+ */ + private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22"; + + public static void main(final String[] args) throws IOException { + + ExamplesUtils.configureLogging(); + + /* + * The WikiData SPARQL query endpoint. + */ + final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql"); + + /* + * SPARQL query body that looks for publications where two authors of the + * publication are the mother, respectively father of the same child. + */ + final String queryBody = " ?publication " + WIKIDATA_TITLE_PROPERTY + " ?title ." + "?publication " + + WIKIDATA_AUTHOR_PROPERTY + " ?mother ." + " ?publication " + WIKIDATA_AUTHOR_PROPERTY + " ?father ." + + " ?child " + WIKIDATA_MOTHER_PROPERTY + " ?mother ." + " ?child " + WIKIDATA_FATHER_PROPERTY + + " ?father ."; + + final Variable titleVariable = Expressions.makeUniversalVariable("title"); + final Variable motherVariable = Expressions.makeUniversalVariable("mother"); + final Variable fatherVariable = Expressions.makeUniversalVariable("father"); + + /* + * The query variables are the variables from the query body which will appear + * in the query result, in the given order. Fact resulting from this query will + * have as terms the title of the publication, the mother publication author and + * the father publication author. + */ + final LinkedHashSet queryVariables = new LinkedHashSet<>( + Arrays.asList(titleVariable, motherVariable, fatherVariable)); + + /* + * We query Wikidata with the SPARQL query composed of the query variables and + * query body. The query result is a DataSource we will associate to a + * predicate. + */ + final DataSource sparqlQueryResultDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + queryVariables, queryBody); + + /* + * Predicate that will be mapped to the SPARQL query result. It must have the + * same arity as the query variables size. In this case, we have 3 query + * variables (title, mother and father). 
+ */ + final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3); + + try (Reasoner reasoner = Reasoner.getInstance()) { + + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + /* + * The SPARQL query results will be added to the reasoner knowledge base, as + * facts associated to the predicate publicationParents. + */ + + kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource)); + reasoner.reason(); + + /* + * We construct a query PositiveLiteral for the predicated associated to the + * SPARQL query result. + */ + final PositiveLiteral query = Expressions.makePositiveLiteral(queryPredicate, Expressions.makeUniversalVariable("x"), + Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); + + /* We query the reasoner for facts of the SPARQL query result predicate. */ + System.out.println("Titles of publications by co-authors who have a child together:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(query, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out.println("- title: " + queryResultTerms.get(0) + ", mother author: " + + queryResultTerms.get(1) + ", father author: " + queryResultTerms.get(2)); + }); + } + + /* + * To do some basic reasoning, we would now like to add the following rule that + * extracts (unique) mothers, fathers, and pairs from the queried data: + * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :- + * publicationParents(?x, ?y, ?z) . 
+ */ + final PositiveLiteral haveChildrenTogether = Expressions.makePositiveLiteral("haveChildrenTogether", + Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z")); + final PositiveLiteral isMother = Expressions.makePositiveLiteral("isMother", Expressions.makeUniversalVariable("y")); + final PositiveLiteral isFather = Expressions.makePositiveLiteral("isFather", Expressions.makeUniversalVariable("z")); + final Conjunction ruleHeadConjunction = Expressions + .makePositiveConjunction(haveChildrenTogether, isMother, isFather); + final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query)); + + /* + * We add the created rule, and reason on the data added from the Wikidata + * SPARQL query result. + */ + kb.addStatement(rule); + reasoner.reason(); + + /* We query the reasoner for facts of the haveChildrenTogether predicate. */ + System.out.println("Co-authors who have a child:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(haveChildrenTogether, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out + .println("- author1: " + queryResultTerms.get(0) + ", author2: " + queryResultTerms.get(1)); + }); + } + + /* We query the reasoner for facts of the isMother predicate. */ + System.out.println("Mothers:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isMother, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out.println("- mother: " + queryResultTerms.get(0)); + }); + } + + /* We query the reasoner for facts of the isFather predicate. 
*/ + System.out.println("Fathers:"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isFather, false)) { + queryResultIterator.forEachRemaining(queryResult -> { + final List queryResultTerms = queryResult.getTerms(); + + System.out.println("- father: " + queryResultTerms.get(0)); + }); + } + + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index fdf14be9c..b29262a49 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java index 387893b9d..ae4042817 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java @@ -1,136 +1,136 @@ -package org.semanticweb.rulewerk.examples.core; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.IOException; - -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.reasoner.Algorithm; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; - -/** - * This example shows non-termination of the Skolem Chase, versus termination of - * the Restricted Chase on the same set of rules and facts. Note that the - * Restricted Chase is the default reasoning algorithm, as it terminates in most - * cases and generates a smaller number of facts. - * - * @author Irina Dragoste - * - */ -public class SkolemVsRestrictedChaseTermination { - - public static void main(final String[] args) throws IOException, ParsingException { - - ExamplesUtils.configureLogging(); - - final String facts = ""// define some facts: - + "bicycle(bicycle1) ." // - + "hasPart(bicycle1, wheel1) ." // - + "wheel(wheel1) ." // - + "bicycle(bicycle2) ."; - - final String rules = "" - // every bicycle has some part that is a wheel: - + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // - // every wheel is part of some bicycle: - + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." // - // hasPart and isPartOf are mutually inverse relations: - + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // - + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; - - /* - * 1. 
Load facts into a knowledge base - */ - final KnowledgeBase kb = RuleParser.parse(facts); - - /* - * 2. Load the knowledge base into the reasoner - */ - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - - /* - * 3. Query the reasoner before applying rules for fact materialisation - */ - final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); - - /* See that there is no fact HasPartIDB before reasoning. */ - System.out.println("Before reasoning is started, no inferrences have been computed yet."); - ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); - - /* - * 4. Load rules into the knowledge base - */ - RuleParser.parseInto(kb, rules); - /* - * 5. Materialise with the Skolem Chase. As the Skolem Chase is known not to - * terminate for this set of rules and facts, it is interrupted after one - * second. - */ - reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - reasoner.setReasoningTimeout(1); - System.out.println("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ..."); - final boolean skolemChaseFinished = reasoner.reason(); - - /* Verify that the Skolem Chase did not terminate before timeout. */ - System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); - /* - * See that the Skolem Chase generated a very large number of facts in 1 second, - * extensively introducing new unnamed individuals to satisfy existential - * restrictions. - */ - System.out.println("Before the timeout, the Skolem chase had produced " - + reasoner.countQueryAnswers(queryHasPart).getCount() + " results for hasPart(?X, ?Y)."); - - /* - * 6. We reset the reasoner to discard all inferences, and apply the Restricted - * Chase on the same set of rules and facts - */ - System.out.println(); - reasoner.resetReasoner(); - - /* - * 7. Materialise with the Restricted Chase. 
As the Restricted Chase is known to - * terminate for this set of rules and facts, we will not interrupt it. - */ - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - reasoner.setReasoningTimeout(null); - final long restrictedChaseStartTime = System.currentTimeMillis(); - System.out.println("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); - reasoner.reason(); - - /* The Restricted Chase terminates: */ - final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; - System.out.println("The Restricted Chase finished in " + restrictedChaseDuration + " ms."); - - /* - * See that the Restricted Chase generated a small number of facts, reusing - * individuals that satisfy existential restrictions. - */ - ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); - } - } - -} +package org.semanticweb.rulewerk.examples.core; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * This example shows non-termination of the Skolem Chase, versus termination of + * the Restricted Chase on the same set of rules and facts. Note that the + * Restricted Chase is the default reasoning algorithm, as it terminates in most + * cases and generates a smaller number of facts. + * + * @author Irina Dragoste + * + */ +public class SkolemVsRestrictedChaseTermination { + + public static void main(final String[] args) throws IOException, ParsingException { + + ExamplesUtils.configureLogging(); + + final String facts = ""// define some facts: + + "bicycle(bicycle1) ." // + + "hasPart(bicycle1, wheel1) ." // + + "wheel(wheel1) ." // + + "bicycle(bicycle2) ."; + + final String rules = "" + // every bicycle has some part that is a wheel: + + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." // + // every wheel is part of some bicycle: + + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." // + // hasPart and isPartOf are mutually inverse relations: + + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // + + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; + + /* + * 1. Load facts into a knowledge base + */ + final KnowledgeBase kb = RuleParser.parse(facts); + + /* + * 2. Load the knowledge base into the reasoner + */ + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + /* + * 3. 
Query the reasoner before applying rules for fact materialisation + */ + final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); + + /* See that there is no fact HasPartIDB before reasoning. */ + System.out.println("Before reasoning is started, no inferrences have been computed yet."); + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); + + /* + * 4. Load rules into the knowledge base + */ + RuleParser.parseInto(kb, rules); + /* + * 5. Materialise with the Skolem Chase. As the Skolem Chase is known not to + * terminate for this set of rules and facts, it is interrupted after one + * second. + */ + reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); + reasoner.setReasoningTimeout(1); + System.out.println("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ..."); + final boolean skolemChaseFinished = reasoner.reason(); + + /* Verify that the Skolem Chase did not terminate before timeout. */ + System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished); + /* + * See that the Skolem Chase generated a very large number of facts in 1 second, + * extensively introducing new unnamed individuals to satisfy existential + * restrictions. + */ + System.out.println("Before the timeout, the Skolem chase had produced " + + reasoner.countQueryAnswers(queryHasPart).getCount() + " results for hasPart(?X, ?Y)."); + + /* + * 6. We reset the reasoner to discard all inferences, and apply the Restricted + * Chase on the same set of rules and facts + */ + System.out.println(); + reasoner.resetReasoner(); + + /* + * 7. Materialise with the Restricted Chase. As the Restricted Chase is known to + * terminate for this set of rules and facts, we will not interrupt it. + */ + reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + reasoner.setReasoningTimeout(null); + final long restrictedChaseStartTime = System.currentTimeMillis(); + System.out.println("Starting Restricted Chase (a.k.a. 
Standard Chase) without any timeout ... "); + reasoner.reason(); + + /* The Restricted Chase terminates: */ + final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; + System.out.println("The Restricted Chase finished in " + restrictedChaseDuration + " ms."); + + /* + * See that the Restricted Chase generated a small number of facts, reusing + * individuals that satisfy existential restrictions. + */ + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index ccd12e5ec..646b65f5a 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index 55e498784..6000043b8 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 9e7b7504e..774b10265 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Examples + * Rulewerk Examples * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index 67be04091..dad6f26f1 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -1,141 +1,141 @@ -package org.semanticweb.rulewerk.examples.owlapi; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Set; - -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Rule; -import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; - -/** - * This example shows how vlog4j-owlapi library (class - * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into - * vlog4j-core {@link Rule}s and {@link Fact}s. 
- * - * @author Irina Dragoste - * - */ -public class OwlOntologyToRulesAndFacts { - - public static void main(final String[] args) throws OWLOntologyCreationException, IOException { - - /* Bike ontology is loaded from a Bike file using OWL API */ - final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); - final OWLOntology ontology = ontologyManager - .loadOntologyFromOntologyDocument(new File(ExamplesUtils.INPUT_FOLDER + "owl/bike.owl")); - - /* - * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in - * source ontology to target Rule and Atom objects - */ - final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); - owlToRulesConverter.addOntology(ontology); - - /* Print out the Rules extracted from bike ontology. */ - System.out.println("Rules extracted from Bike ontology:"); - final Set rules = owlToRulesConverter.getRules(); - for (final Rule rule : rules) { - System.out.println(" - rule: " + rule); - } - System.out.println(); - - /* Print out Facts extracted from bike ontology */ - System.out.println("Facts extracted from Bike ontology:"); - final Set facts = owlToRulesConverter.getFacts(); - for (final PositiveLiteral fact : facts) { - System.out.println(" - fact: " + fact); - } - System.out.println(); - - final KnowledgeBase kb = new KnowledgeBase(); - kb.addStatements(new ArrayList<>(owlToRulesConverter.getRules())); - kb.addStatements(owlToRulesConverter.getFacts()); - - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - /* - * Load rules and facts obtained from the ontology, and reason over loaded - * ontology with the default algorithm Restricted Chase - */ - System.out.println("Reasoning default algorithm: " + reasoner.getAlgorithm()); - reasoner.reason(); - - /* Query for the parts of bike constant "b2". 
*/ - final Variable vx = Expressions.makeUniversalVariable("x"); - final Constant b2 = Expressions.makeAbstractConstant("http://www.bike-example.ontology#b2"); - - final PositiveLiteral b2HasPart = Expressions - .makePositiveLiteral("http://www.bike-example.ontology#hasPart", b2, vx); - System.out.println("Answers to query " + b2HasPart + " :"); - - /* - * See that an unnamed individual has been introduced to satisfy - * owl:someValuesFrom restriction: - * - * :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; - * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . - */ - try (QueryResultIterator answers = reasoner.answerQuery(b2HasPart, true);) { - answers.forEachRemaining(answer -> { - final Term constantB2 = answer.getTerms().get(0); - final Term term = answer.getTerms().get(1); - System.out.println(" - " + constantB2 + " hasPart " + term); - System.out.println(" Term " + term + " is of type " + term.getType()); - }); - } - - final PositiveLiteral isPartOfB2 = Expressions - .makePositiveLiteral("http://www.bike-example.ontology#isPartOf", vx, b2); - - System.out.println("Answers to query " + isPartOfB2 + " :"); - /* - * See that the same unnamed individual is part of Bike b2, satisfying - * restriction :Wheel rdf:type owl:Class ; rdfs:subClassOf [ rdf:type - * owl:Restriction ; owl:onProperty :isPartOf ; owl:someValuesFrom :Bike ] . 
- */ - try (QueryResultIterator answers = reasoner.answerQuery(isPartOfB2, true);) { - answers.forEachRemaining(answer -> { - final Term term = answer.getTerms().get(0); - final Term constantB2 = answer.getTerms().get(1); - System.out.println(" - " + term + " isPartOf " + constantB2); - System.out.println(" Term " + term + " is of type " + term.getType()); - }); - } - - } - } -} +package org.semanticweb.rulewerk.examples.owlapi; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Set; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; + +/** + * This example shows how vlog4j-owlapi library (class + * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into + * vlog4j-core {@link Rule}s and {@link Fact}s. 
+ * + * @author Irina Dragoste + * + */ +public class OwlOntologyToRulesAndFacts { + + public static void main(final String[] args) throws OWLOntologyCreationException, IOException { + + /* Bike ontology is loaded from a Bike file using OWL API */ + final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); + final OWLOntology ontology = ontologyManager + .loadOntologyFromOntologyDocument(new File(ExamplesUtils.INPUT_FOLDER + "owl/bike.owl")); + + /* + * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in + * source ontology to target Rule and Atom objects + */ + final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); + owlToRulesConverter.addOntology(ontology); + + /* Print out the Rules extracted from bike ontology. */ + System.out.println("Rules extracted from Bike ontology:"); + final Set rules = owlToRulesConverter.getRules(); + for (final Rule rule : rules) { + System.out.println(" - rule: " + rule); + } + System.out.println(); + + /* Print out Facts extracted from bike ontology */ + System.out.println("Facts extracted from Bike ontology:"); + final Set facts = owlToRulesConverter.getFacts(); + for (final PositiveLiteral fact : facts) { + System.out.println(" - fact: " + fact); + } + System.out.println(); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(new ArrayList<>(owlToRulesConverter.getRules())); + kb.addStatements(owlToRulesConverter.getFacts()); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + /* + * Load rules and facts obtained from the ontology, and reason over loaded + * ontology with the default algorithm Restricted Chase + */ + System.out.println("Reasoning default algorithm: " + reasoner.getAlgorithm()); + reasoner.reason(); + + /* Query for the parts of bike constant "b2". 
*/ + final Variable vx = Expressions.makeUniversalVariable("x"); + final Constant b2 = Expressions.makeAbstractConstant("http://www.bike-example.ontology#b2"); + + final PositiveLiteral b2HasPart = Expressions + .makePositiveLiteral("http://www.bike-example.ontology#hasPart", b2, vx); + System.out.println("Answers to query " + b2HasPart + " :"); + + /* + * See that an unnamed individual has been introduced to satisfy + * owl:someValuesFrom restriction: + * + * :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; + * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . + */ + try (QueryResultIterator answers = reasoner.answerQuery(b2HasPart, true);) { + answers.forEachRemaining(answer -> { + final Term constantB2 = answer.getTerms().get(0); + final Term term = answer.getTerms().get(1); + System.out.println(" - " + constantB2 + " hasPart " + term); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); + } + + final PositiveLiteral isPartOfB2 = Expressions + .makePositiveLiteral("http://www.bike-example.ontology#isPartOf", vx, b2); + + System.out.println("Answers to query " + isPartOfB2 + " :"); + /* + * See that the same unnamed individual is part of Bike b2, satisfying + * restriction :Wheel rdf:type owl:Class ; rdfs:subClassOf [ rdf:type + * owl:Restriction ; owl:onProperty :isPartOf ; owl:someValuesFrom :Bike ] . 
+ */ + try (QueryResultIterator answers = reasoner.answerQuery(isPartOfB2, true);) { + answers.forEachRemaining(answer -> { + final Term term = answer.getTerms().get(0); + final Term constantB2 = answer.getTerms().get(1); + System.out.println(" - " + term + " isPartOf " + constantB2); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); + } + + } + } +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index d1aaa22e3..251f89777 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -1,184 +1,184 @@ -package org.semanticweb.rulewerk.examples.rdf; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.Set; - -import org.openrdf.model.Model; -import org.openrdf.model.impl.LinkedHashModel; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.Rio; -import org.openrdf.rio.helpers.StatementCollector; -import org.semanticweb.rulewerk.core.model.api.Constant; -import org.semanticweb.rulewerk.core.model.api.Fact; -import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; -import org.semanticweb.rulewerk.core.model.api.Predicate; -import org.semanticweb.rulewerk.core.model.api.Variable; -import org.semanticweb.rulewerk.core.model.implementation.Expressions; -import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; -import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; -import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; -import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.parser.ParsingException; -import org.semanticweb.rulewerk.parser.RuleParser; -import org.semanticweb.rulewerk.rdf.RdfModelConverter; - -/** - * This example shows how vlog4j-rdf library's utility class - * {@link RdfModelConverter} can be used to convert RDF {@link Model}s from - * various types of RDF resources to vlog4j-core {@code Atom} sets. - * - * @author Irina Dragoste - * - */ -public class AddDataFromRdfModel { - - public static void main(final String[] args) - throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { - - ExamplesUtils.configureLogging(); - - /* - * Local file containing metadata of publications from ISWC'16 conference, in - * RDF/XML format. 
- */ - final File rdfXMLResourceFile = new File(ExamplesUtils.INPUT_FOLDER + "rdf/iswc-2016-complete-alignments.rdf"); - final FileInputStream inputStreamISWC2016 = new FileInputStream(rdfXMLResourceFile); - /* An RDF Model is obtained from parsing the RDF/XML resource. */ - final Model rdfModelISWC2016 = parseRdfResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), - RDFFormat.RDFXML); - - /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". - */ - final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); - System.out.println("Example triple fact from iswc-2016 dataset:"); - System.out.println(" - " + tripleFactsISWC2016.iterator().next()); - - /* - * URL of online resource containing metadata of publications from ISWC'17 - * conference, in TURTLE format. - */ - final URL turtleResourceURL = new URL( - "http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2017-complete-alignments.ttl"); - final InputStream inputStreamISWC2017 = turtleResourceURL.openStream(); - /* An RDF Model is obtained from parsing the TURTLE resource. */ - final Model rdfModelISWC2017 = parseRdfResource(inputStreamISWC2017, turtleResourceURL.toURI(), - RDFFormat.TURTLE); - - /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having - * the ternary predicate "TRIPLE". - */ - final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); - System.out.println("Example triple fact from iswc-2017 dataset:"); - System.out.println(" - " + tripleFactsISWC2017.iterator().next()); - - /** - * We wish to combine triples about a person's affiliation, an affiliation's - * organization and an organization's name, to find a person's organization - * name. - */ - - /* Predicate names of the triples found in both RDF files. 
*/ - final Variable varPerson = Expressions.makeUniversalVariable("person"); - final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2); - - /* - * Rule that retrieves pairs of persons and their organization name: - */ - final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" - + "@prefix cnf: ." - + "hasOrganizationName(?Person, ?OrgName) :- " - + " TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org)," - + " TRIPLE(?Org, cnf:name, ?OrgName) ."; - KnowledgeBase kb; - try { - kb = RuleParser.parse(rules); - } catch (final ParsingException e) { - System.out.println("Failed to parse rules: " + e.getMessage()); - return; - } - kb.addStatements(tripleFactsISWC2016); - kb.addStatements(tripleFactsISWC2017); - - try (VLogReasoner reasoner = new VLogReasoner(kb)) { - reasoner.reason(); - - /* We query for persons whose organization name is "TU Dresden" . */ - final Constant constantTuDresden = Expressions.makeDatatypeConstant("TU Dresden", - "http://www.w3.org/2001/XMLSchema#string"); - /* hasOrganizationName(?person, "TU Dresden") */ - final PositiveLiteral queryTUDresdenParticipantsAtISWC = Expressions - .makePositiveLiteral(predicateHasOrganizationName, varPerson, constantTuDresden); - - System.out.println("\nParticipants at ISWC'16 and '17 from Organization 'TU Dresden':"); - System.out.println("(Answers to query " + queryTUDresdenParticipantsAtISWC + ")\n"); - try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC, - false)) { - queryResultIterator.forEachRemaining(answer -> System.out - .println(" - " + answer.getTerms().get(0) + ", organization " + answer.getTerms().get(1))); - } - - } - - } - - /** - * Parses the data from the supplied InputStream, using the supplied baseURI to - * resolve any relative URI references. 
- * - * @param inputStream The content to be parsed, expected to be in the given - * {@code rdfFormat}. - * @param baseURI The URI associated with the data in the InputStream. - * @param rdfFormat The expected RDFformat of the inputStream resource that is - * to be parsed. - * @return A Model containing the RDF triples. Blanks have unique ids across - * different models. - * @throws IOException If an I/O error occurred while data was read from - * the InputStream. - * @throws RDFParseException If the parser has found an unrecoverable parse - * error. - * @throws RDFHandlerException If the configured statement handler has - * encountered an unrecoverable error. - */ - private static Model parseRdfResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) - throws IOException, RDFParseException, RDFHandlerException { - final Model model = new LinkedHashModel(); - final RDFParser rdfParser = Rio.createParser(rdfFormat); - rdfParser.setRDFHandler(new StatementCollector(model)); - rdfParser.parse(inputStream, baseURI.toString()); - - return model; - } - -} +package org.semanticweb.rulewerk.examples.rdf; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Set; + +import org.openrdf.model.Model; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; + +/** + * This example shows how vlog4j-rdf library's utility class + * {@link RdfModelConverter} can be used to convert RDF {@link Model}s from + * various types of RDF resources to vlog4j-core {@code Atom} sets. + * + * @author Irina Dragoste + * + */ +public class AddDataFromRdfModel { + + public static void main(final String[] args) + throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { + + ExamplesUtils.configureLogging(); + + /* + * Local file containing metadata of publications from ISWC'16 conference, in + * RDF/XML format. 
+ */ + final File rdfXMLResourceFile = new File(ExamplesUtils.INPUT_FOLDER + "rdf/iswc-2016-complete-alignments.rdf"); + final FileInputStream inputStreamISWC2016 = new FileInputStream(rdfXMLResourceFile); + /* An RDF Model is obtained from parsing the RDF/XML resource. */ + final Model rdfModelISWC2016 = parseRdfResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), + RDFFormat.RDFXML); + + /* + * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * the ternary predicate "TRIPLE". + */ + final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); + System.out.println("Example triple fact from iswc-2016 dataset:"); + System.out.println(" - " + tripleFactsISWC2016.iterator().next()); + + /* + * URL of online resource containing metadata of publications from ISWC'17 + * conference, in TURTLE format. + */ + final URL turtleResourceURL = new URL( + "http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2017-complete-alignments.ttl"); + final InputStream inputStreamISWC2017 = turtleResourceURL.openStream(); + /* An RDF Model is obtained from parsing the TURTLE resource. */ + final Model rdfModelISWC2017 = parseRdfResource(inputStreamISWC2017, turtleResourceURL.toURI(), + RDFFormat.TURTLE); + + /* + * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * the ternary predicate "TRIPLE". + */ + final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); + System.out.println("Example triple fact from iswc-2017 dataset:"); + System.out.println(" - " + tripleFactsISWC2017.iterator().next()); + + /** + * We wish to combine triples about a person's affiliation, an affiliation's + * organization and an organization's name, to find a person's organization + * name. + */ + + /* Predicate names of the triples found in both RDF files. 
*/ + final Variable varPerson = Expressions.makeUniversalVariable("person"); + final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2); + + /* + * Rule that retrieves pairs of persons and their organization name: + */ + final String rules = "%%%% We specify the rules syntactically for convenience %%%\n" + + "@prefix cnf: ." + + "hasOrganizationName(?Person, ?OrgName) :- " + + " TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org)," + + " TRIPLE(?Org, cnf:name, ?OrgName) ."; + KnowledgeBase kb; + try { + kb = RuleParser.parse(rules); + } catch (final ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } + kb.addStatements(tripleFactsISWC2016); + kb.addStatements(tripleFactsISWC2017); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + /* We query for persons whose organization name is "TU Dresden" . */ + final Constant constantTuDresden = Expressions.makeDatatypeConstant("TU Dresden", + "http://www.w3.org/2001/XMLSchema#string"); + /* hasOrganizationName(?person, "TU Dresden") */ + final PositiveLiteral queryTUDresdenParticipantsAtISWC = Expressions + .makePositiveLiteral(predicateHasOrganizationName, varPerson, constantTuDresden); + + System.out.println("\nParticipants at ISWC'16 and '17 from Organization 'TU Dresden':"); + System.out.println("(Answers to query " + queryTUDresdenParticipantsAtISWC + ")\n"); + try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC, + false)) { + queryResultIterator.forEachRemaining(answer -> System.out + .println(" - " + answer.getTerms().get(0) + ", organization " + answer.getTerms().get(1))); + } + + } + + } + + /** + * Parses the data from the supplied InputStream, using the supplied baseURI to + * resolve any relative URI references. 
+ * + * @param inputStream The content to be parsed, expected to be in the given + * {@code rdfFormat}. + * @param baseURI The URI associated with the data in the InputStream. + * @param rdfFormat The expected RDFformat of the inputStream resource that is + * to be parsed. + * @return A Model containing the RDF triples. Blanks have unique ids across + * different models. + * @throws IOException If an I/O error occurred while data was read from + * the InputStream. + * @throws RDFParseException If the parser has found an unrecoverable parse + * error. + * @throws RDFHandlerException If the configured statement handler has + * encountered an unrecoverable error. + */ + private static Model parseRdfResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) + throws IOException, RDFParseException, RDFHandlerException { + final Model model = new LinkedHashModel(); + final RDFParser rdfParser = Rio.createParser(rdfFormat); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseURI.toString()); + + return model; + } + +} diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java index 773bed754..ada714cbb 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java @@ -3,9 +3,9 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index e02365c20..0c077263e 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java index 3a03c5993..f23cb6042 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java index 3e81a6909..e522de86d 100644 --- a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4J Graal Import Components + * Rulewerk Graal Import Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java index fdee7057b..7c8ab043c 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java index d77a95389..11818553f 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java index 4a958d114..2857286c6 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index f16abe0f0..6564a0a8c 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -11,16 +11,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index 254da4dcd..4f1841ebc 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 2a6f7ea05..aa02ca07a 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -11,9 +11,9 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index e5386caa4..4273bae87 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java index 6ed6fee3a..d561b779c 100644 --- a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java index a6cd79f27..0bf3ea7be 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java index bf89afe17..e17ae1d1e 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DataSourceDeclarationHandler.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java index 12ad24f5e..bc94fc7ba 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java index f34bbc8c8..cf45c534a 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java index 4d1b77764..c31270d35 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveArgument.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java index fca86b07b..01c9fc73c 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index 4319a79ab..72e7d654a 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index 22b268165..ee2a687f1 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java index f98e5639f..abca75072 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java index ca28f7ea5..5ffdcf281 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index eca5940b3..338b024e8 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index d5833b1c6..b27f52d21 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java index 1bda9a0a2..e91632ac9 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -2,9 +2,9 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java index 532219593..337475363 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java index c0130ddad..1de8df9f5 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 5c01d5b4c..f61a80218 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java index ab7e498ad..9a615bf1b 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java @@ -4,16 +4,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java index 7b14d9094..2ce1af622 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveArgumentTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java index 1e2b9adb6..ef0a4eb6c 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java index aa166ede6..910e9375b 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java index 4f22a9a3a..cae03b2c7 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java index 89e5cd847..99c4e513e 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java index 0e46aaf97..639b30c01 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index c69fe217d..21919dd91 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java index e8185268d..dee7199c1 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java index e0a4b38f2..f3e030e02 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j Syntax + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java index 037e2d666..0ba85b209 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java @@ -2,16 +2,16 @@ /*- * #%L - * vlog4j-parser + * Rulewerk Parser * %% - * Copyright (C) 2018 - 2019 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java index e7473f587..e5547b8dd 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java index 59b0ac934..6ead51c2f 100644 --- a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java index ca400540f..fa689c4ad 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java index c0f126e15..052ab1f5a 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestConvertRdfFileToFacts.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java index fc24fcaf5..6297a8968 100644 --- a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/TestReasonOverRdfFacts.java @@ -2,16 +2,16 @@ /*- * #%L - * VLog4j RDF Support + * Rulewerk RDF Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
From dadfc5210dfba5d699600fe65b1f7e5d4f2d7d30 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 15:28:57 +0100 Subject: [PATCH 0820/1255] Rename all references of VLog4j --- .../rulewerk/client/picocli/ClientUtils.java | 8 +- .../client/picocli/PrintQueryResults.java | 12 +-- ...{VLog4jClient.java => RulewerkClient.java} | 16 +-- ...ze.java => RulewerkClientMaterialize.java} | 10 +- .../rulewerk/client/picocli/SaveModel.java | 8 +- .../client/picocli/SaveQueryResults.java | 8 +- .../IncompatiblePredicateArityException.java | 8 +- .../PrefixDeclarationException.java | 6 +- .../exceptions/ReasonerStateException.java | 10 +- ...jException.java => RulewerkException.java} | 16 +-- ...ion.java => RulewerkRuntimeException.java} | 14 +-- .../model/implementation/Expressions.java | 25 +++-- .../MergingPrefixDeclarationRegistry.java | 6 +- .../core/model/implementation/Serializer.java | 40 +++---- .../rulewerk/core/reasoner/KnowledgeBase.java | 14 +-- .../implementation/TermToVLogConverter.java | 34 +++--- .../reasoner/implementation/VLogReasoner.java | 6 +- .../MergingPrefixDeclarationRegistryTest.java | 8 +- .../VLogToModelConverterTest.java | 24 ++--- rulewerk-examples/README.md | 12 +-- .../examples/SimpleReasoningExample.java | 8 +- .../examples/graal/AddDataFromDlgpFile.java | 22 ++-- .../examples/graal/AddDataFromGraal.java | 20 ++-- .../examples/graal/DoidExampleGraal.java | 8 +- .../owlapi/OwlOntologyToRulesAndFacts.java | 14 +-- .../examples/rdf/AddDataFromRdfModel.java | 16 +-- .../rulewerk/graal/GraalConvertException.java | 12 ++- ...ava => GraalToRulewerkModelConverter.java} | 60 +++++------ ...=> GraalToRulewerkModelConverterTest.java} | 100 +++++++++--------- .../rulewerk/parser/ParserConfiguration.java | 6 +- .../rulewerk/parser/ParsingException.java | 8 +- .../rulewerk/parser/javacc/JavaCCParser.jj | 4 +- 32 files changed, 282 insertions(+), 281 deletions(-) rename 
rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/{VLog4jClient.java => RulewerkClient.java} (70%) rename rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/{VLog4jClientMaterialize.java => RulewerkClientMaterialize.java} (96%) rename rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/{VLog4jException.java => RulewerkException.java} (72%) rename rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/{VLog4jRuntimeException.java => RulewerkRuntimeException.java} (78%) rename rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/{GraalToVLog4JModelConverter.java => GraalToRulewerkModelConverter.java} (87%) rename rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/{GraalToVLog4JModelConverterTest.java => GraalToRulewerkModelConverterTest.java} (69%) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java index edaf61f89..aca4cd136 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,7 +31,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; /** - * Utility class for interacting with the vlog4j client. + * Utility class for interacting with the Rulewerk client. 
* * @author dragoste * @@ -96,7 +96,7 @@ public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final R * * @param queryAtom query to be answered * @param reasoner reasoner to query on - * + * * @return number of answers to the given query */ public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java index 4f44b7f4c..a36f0d85d 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -33,21 +33,21 @@ public class PrintQueryResults { static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true."; /** - * If true, Vlog4jClient will print the size of the query result. Mutually + * If true, RulewerkClient will print the size of the query result. Mutually * exclusive with {@code --print-complete-query-result} * * @default true */ - @Option(names = "--print-query-result-size", description = "Boolean. If true, Vlog4jClient will print the size of the query result. True by default.") + @Option(names = "--print-query-result-size", description = "Boolean. 
If true, RulewerkClient will print the size of the query result. True by default.") private boolean sizeOnly = true; /** - * If true, Vlog4jClient will print the query result in stdout. Mutually + * If true, RulewerkClient will print the query result in stdout. Mutually * exclusive with {@code --print-query-result-size} * * @default false */ - @Option(names = "--print-complete-query-result", description = "Boolean. If true, Vlog4jClient will print the query result in stdout. False by default.") + @Option(names = "--print-complete-query-result", description = "Boolean. If true, RulewerkClient will print the query result in stdout. False by default.") private boolean complete = false; public PrintQueryResults() { diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java similarity index 70% rename from rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java index c2db5d180..54de47a9b 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClient.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClient.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,22 +25,22 @@ import picocli.CommandLine.Command; /** - * Stand alone client for VLog4j. + * Stand alone client for Rulewerk. 
* * @author Larry Gonzalez * */ -@Command(name = "java -jar VLog4jClient.jar", description = "VLog4jClient: A command line client of VLog4j.", subcommands = { - VLog4jClientMaterialize.class }) -public class VLog4jClient implements Runnable { +@Command(name = "java -jar RulewerkClient.jar", description = "RulewerkClient: A command line client for Rulewerk.", subcommands = { + RulewerkClientMaterialize.class }) +public class RulewerkClient implements Runnable { public static void main(String[] args) { - CommandLine commandline = new CommandLine(new VLog4jClient()); + CommandLine commandline = new CommandLine(new RulewerkClient()); commandline.execute(args); } @Override public void run() { - (new CommandLine(new VLog4jClient())).usage(System.out); + (new CommandLine(new RulewerkClient())).usage(System.out); } } diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java similarity index 96% rename from rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java rename to rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java index 961c3a8ed..91a4d0cda 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/VLog4jClientMaterialize.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -45,12 +45,12 @@ * */ @Command(name = "materialize", description = "Execute the chase and store the literal's extensions") -public class VLog4jClientMaterialize implements Runnable { +public class RulewerkClientMaterialize implements Runnable { private final KnowledgeBase kb = new KnowledgeBase(); private final List queries = new ArrayList<>(); - @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/vlog4j/wiki/Rule-syntax-grammar} syntax", required = true) + @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar} syntax", required = true) private final List ruleFiles = new ArrayList<>(); // TODO @@ -70,7 +70,7 @@ public class VLog4jClientMaterialize implements Runnable { @Option(names = "--timeout", description = "Timeout in seconds. Infinite by default", required = false) private int timeout = 0; - @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. Vlog4jClient will print the size of its extension", required = true) + @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. RulewerkClient will print the size of its extension", required = true) private List queryStrings = new ArrayList<>(); @ArgGroup(exclusive = false) diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java index 354fa52f4..bcd23d052 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,11 +38,11 @@ public class SaveModel { static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path."; /** - * If true, Vlog4jClient will save the model in {@code --output-model-directory} + * If true, RulewerkClient will save the model in {@code --output-model-directory} * * @default false */ - @Option(names = "--save-model", description = "Boolean. If true, Vlog4jClient will save the model into --output-model-directory. False by default.") + @Option(names = "--save-model", description = "Boolean. If true, RulewerkClient will save the model into --output-model-directory. False by default.") private boolean saveModel = false; /** diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java index 8d43da5fd..9ca9bd4fb 100644 --- a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -37,12 +37,12 @@ public class SaveQueryResults { static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. Please check the path."; /** - * If true, Vlog4jClient will save the query result in + * If true, RulewerkClient will save the query result in * {@code --output-query-result-directory} * * @default false */ - @Option(names = "--save-query-results", description = "Boolean. If true, Vlog4jClient will save the query result into --output-query-result-directory. False by default.") + @Option(names = "--save-query-results", description = "Boolean. If true, RulewerkClient will save the query result into --output-query-result-directory. False by default.") private boolean saveResults = false; /** diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java index 67f033a81..28e22ce99 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,11 +28,11 @@ /** * Expression thrown when attempting to load facts for a {@link Predicate} from * a {@link DataSource} that does not contain data of the specified arity. 
- * + * * @author Irina Dragoste * */ -public class IncompatiblePredicateArityException extends VLog4jRuntimeException { +public class IncompatiblePredicateArityException extends RulewerkRuntimeException { private static final long serialVersionUID = -5081219042292721026L; private static final String messagePattern = "Predicate arity [{0}] of predicate [{1}] incompatible with arity [{2}] of the data source [{3}]!"; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java index b81bfaffe..eacafd6de 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,7 +20,7 @@ * #L% */ -public class PrefixDeclarationException extends VLog4jException { +public class PrefixDeclarationException extends RulewerkException { private static final long serialVersionUID = 787997047134745982L; public PrefixDeclarationException(String errorMessage) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index 813035df3..af961ffda 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,11 +27,11 @@ /** * Thrown when an operation that is invalid in current reasoner state is * attempted. - * + * * @author Irina Dragoste * */ -public class ReasonerStateException extends VLog4jRuntimeException { +public class ReasonerStateException extends RulewerkRuntimeException { /** * generated serial version UID @@ -42,7 +42,7 @@ public class ReasonerStateException extends VLog4jRuntimeException { /** * Creates an exception with a logging message for current reasoner state. - * + * * @param state * the current Reasoner state. 
* @param message diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java similarity index 72% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java index d8c046a19..ea0eaca0e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,30 +21,30 @@ */ /** - * Top-level checked exception for VLog4j system. + * Top-level checked exception for Rulewerk system. 
* @author Irina Dragoste * */ -public class VLog4jException extends Exception { +public class RulewerkException extends Exception { /** * generated serial version UID */ private static final long serialVersionUID = 8305375071519734590L; - public VLog4jException(Throwable cause) { + public RulewerkException(Throwable cause) { super(cause); } - public VLog4jException(String message, Throwable cause) { + public RulewerkException(String message, Throwable cause) { super(message, cause); } - public VLog4jException(String message) { + public RulewerkException(String message) { super(message); } - public VLog4jException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + public RulewerkException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java similarity index 78% rename from rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java index 65a52d9c6..609e0f882 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/VLog4jRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,23 +21,23 @@ */ /** - * Superclass of unchecked exceptions generated by VLog4j. - * + * Superclass of unchecked exceptions generated by Rulewerk. + * * @author Markus Kroetzsch * */ -public class VLog4jRuntimeException extends RuntimeException { +public class RulewerkRuntimeException extends RuntimeException { /** * Generated serial version ID. */ private static final long serialVersionUID = -6574826887294416900L; - public VLog4jRuntimeException(String message, Throwable cause) { + public RulewerkRuntimeException(String message, Throwable cause) { super(message, cause); } - public VLog4jRuntimeException(String message) { + public RulewerkRuntimeException(String message) { super(message); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java index 24998c007..74529fb51 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -1,7 +1,5 @@ package org.semanticweb.rulewerk.core.model.implementation; -import java.util.ArrayList; - /*- * #%L * Rulewerk Core Components @@ -11,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +20,7 @@ * #L% */ +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -41,8 +40,8 @@ /** * This utilities class provides static methods for creating terms and formulas - * in vlog4j. - * + * in Rulewerk. + * * @author Markus Krötzsch * */ @@ -57,7 +56,7 @@ private Expressions() { /** * Creates a {@link UniversalVariable}. - * + * * @param name name of the variable * @return a {@link UniversalVariable} corresponding to the input. */ @@ -67,7 +66,7 @@ public static UniversalVariable makeUniversalVariable(String name) { /** * Creates an {@link ExistentialVariable}. - * + * * @param name name of the variable * @return a {@link ExistentialVariable} corresponding to the input. */ @@ -77,7 +76,7 @@ public static ExistentialVariable makeExistentialVariable(String name) { /** * Creates an {@link AbstractConstant}. - * + * * @param name name of the constant * @return an {@link AbstractConstant} corresponding to the input. */ @@ -87,7 +86,7 @@ public static AbstractConstant makeAbstractConstant(String name) { /** * Creates a {@link DatatypeConstant} from the given input. - * + * * @param lexicalValue the lexical representation of the data value * @param datatypeIri the full absolute IRI of the datatype of this literal * @return a {@link DatatypeConstant} corresponding to the input. @@ -98,7 +97,7 @@ public static DatatypeConstant makeDatatypeConstant(String lexicalValue, String /** * Creates a {@link LanguageStringConstant} from the given input. 
- * + * * @param string the string value of the constant * @param languageTag the BCP 47 language tag of the constant; should be in * lower case @@ -110,7 +109,7 @@ public static LanguageStringConstant makeLanguageStringConstant(String string, S /** * Creates a {@link Predicate}. - * + * * @param name non-blank predicate name * @param arity predicate arity, strictly greater than 0 * @return a {@link Predicate} corresponding to the input. @@ -137,7 +136,7 @@ public static Fact makeFact(final String predicateName, final List terms) /** * Creates a {@code Fact}. - * + * * @param predicateName on-blank {@link Predicate} name * @param terms non-empty, non-null array of non-null terms * @return a {@link Fact} with given {@code terms} and {@link Predicate} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 77aa2038b..8b6ebe16b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -46,7 +46,7 @@ final public class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclar /** * Prefix string to use for generated prefix name */ - private static final String GENERATED_PREFIX_PREFIX_STRING = "vlog4j_generated_"; + private static final String GENERATED_PREFIX_PREFIX_STRING = "rulewerk_generated_"; public MergingPrefixDeclarationRegistry() { super(); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 9df2cde79..bbccf9e1b 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -16,9 +16,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -97,7 +97,7 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see Rule syntax . + * @see Rule syntax . * @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -109,7 +109,7 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see Rule syntax . + * @see Rule syntax . * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. 
*/ @@ -130,7 +130,7 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. * - * @see Rule syntax . + * @see Rule syntax . * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ @@ -146,7 +146,7 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see Rule syntax . + * @see Rule syntax . * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ @@ -157,7 +157,7 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link AbstractConstant}. * - * @see Rule syntax . + * @see Rule syntax . * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -170,7 +170,7 @@ public static String getString(final AbstractConstant constant, FunctionRule syntax . + * @see Rule syntax . * @param constant a {@link AbstractConstant} * @return String representation corresponding to a given * {@link AbstractConstant}. @@ -183,7 +183,7 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see Rule syntax . + * @see Rule syntax . * @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -208,7 +208,7 @@ public static String getConstantName(final LanguageStringConstant languageString *
        • {@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.
        • *
        * - * @see Rule syntax . + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -253,7 +253,7 @@ public static String getString(final DatatypeConstant datatypeConstant) { * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -268,7 +268,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant, * Creates a String representation corresponding to the name of a given * {@link DatatypeConstant} including an IRI. * - * @see Rule syntax . + * @see Rule syntax . * @param datatypeConstant a {@link DatatypeConstant} * @return String representation corresponding to a given * {@link DatatypeConstant}. @@ -281,7 +281,7 @@ public static String getConstantName(final DatatypeConstant datatypeConstant) { /** * Creates a String representation of a given {@link ExistentialVariable}. * - * @see Rule syntax . + * @see Rule syntax . * @param existentialVariable a {@link ExistentialVariable} * @return String representation corresponding to a given * {@link ExistentialVariable}. @@ -293,7 +293,7 @@ public static String getString(final ExistentialVariable existentialVariable) { /** * Creates a String representation of a given {@link UniversalVariable}. * - * @see Rule syntax . + * @see Rule syntax . * @param universalVariable a {@link UniversalVariable} * @return String representation corresponding to a given * {@link UniversalVariable}. @@ -305,7 +305,7 @@ public static String getString(final UniversalVariable universalVariable) { /** * Creates a String representation of a given {@link NamedNull}. * - * @see Rule syntax . + * @see Rule syntax . 
* @param namedNull a {@link NamedNull} * @return String representation corresponding to a given {@link NamedNull}. */ @@ -316,7 +316,7 @@ public static String getString(final NamedNull namedNull) { /** * Creates a String representation of a given {@link Predicate}. * - * @see Rule syntax . + * @see Rule syntax . * @param predicate a {@link Predicate} * @return String representation corresponding to a given {@link Predicate}. */ @@ -327,7 +327,7 @@ public static String getString(final Predicate predicate) { /** * Creates a String representation of a given {@link DataSourceDeclaration}. * - * @see Rule syntax . + * @see Rule syntax . * @param dataSourceDeclaration a {@link DataSourceDeclaration} * @return String representation corresponding to a given * {@link DataSourceDeclaration}. @@ -340,7 +340,7 @@ public static String getString(final DataSourceDeclaration dataSourceDeclaration /** * Creates a String representation of a given {@link CsvFileDataSource}. * - * @see Rule syntax .. + * @see Rule syntax .. * * @param csvFileDataSource * @return String representation corresponding to a given @@ -353,7 +353,7 @@ public static String getString(final CsvFileDataSource csvFileDataSource) { /** * Creates a String representation of a given {@link RdfFileDataSource}. * - * @see Rule syntax .. + * @see Rule syntax .. * * * @param rdfFileDataSource @@ -368,7 +368,7 @@ public static String getString(final RdfFileDataSource rdfFileDataSource) { * Creates a String representation of a given * {@link SparqlQueryResultDataSource}. * - * @see Rule syntax . + * @see Rule syntax . 
* * * @param dataSource diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java index 5ebe5560c..e125d3267 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,7 +38,7 @@ import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.exceptions.VLog4jException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; @@ -451,12 +451,12 @@ Map> getFactsByPredicate() { * This is essentially * {@link org.semanticweb.rulewerk.parser.RuleParser#parseInto}, but we need to * avoid a circular dependency here -- this is also why we throw - * {@link VLog4jException} instead of + * {@link RulewerkException} instead of * {@link org.semanticweb.rulewerk.parser.ParsingException}. 
*/ @FunctionalInterface public interface AdditionalInputParser { - void parseInto(InputStream stream, KnowledgeBase kb) throws IOException, VLog4jException; + void parseInto(InputStream stream, KnowledgeBase kb) throws IOException, RulewerkException; } /** @@ -469,10 +469,10 @@ public interface AdditionalInputParser { * @throws IOException when reading {@code file} fails * @throws IllegalArgumentException when {@code file} is null or has already * been imported - * @throws VLog4jException when parseFunction throws VLog4jException + * @throws RulewerkException when parseFunction throws RulewerkException */ public void importRulesFile(File file, AdditionalInputParser parseFunction) - throws VLog4jException, IOException, IllegalArgumentException { + throws RulewerkException, IOException, IllegalArgumentException { Validate.notNull(file, "file must not be null"); boolean isNewFile = importedFilePaths.add(file.getCanonicalPath()); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 345bd8aaf..5fe7d568d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -33,7 +33,7 @@ /** * A visitor that converts {@link Term}s of different types to corresponding * internal VLog model {@link karmaresearch.vlog.Term}s. - * + * * @author Irina Dragoste * */ @@ -69,17 +69,17 @@ public karmaresearch.vlog.Term visit(LanguageStringConstant term) { /** * Converts the given constant to the name of a constant in VLog. - * + * * @param constant * @return VLog constant string */ public static String getVLogNameForConstant(Constant constant) { if (constant.getType() == TermType.ABSTRACT_CONSTANT) { - String vLog4jConstantName = constant.getName(); - if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; + String rulewerkConstantName = constant.getName(); + if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > + return "<" + rulewerkConstantName + ">"; } else { // keep relative IRIs unchanged - return vLog4jConstantName; + return rulewerkConstantName; } } else { // datatype literal return constant.getName(); @@ -87,19 +87,19 @@ public static String getVLogNameForConstant(Constant constant) { } /** - * Converts the string representation of a constant in VLog4j directly to the + * Converts the string representation of a constant in Rulewerk directly to the * name of a constant in VLog, without parsing it into a {@link Constant} first. 
- * - * @param vLog4jConstantName + * + * @param rulewerkConstantName * @return VLog constant string */ - public static String getVLogNameForConstantName(String vLog4jConstantName) { - if (vLog4jConstantName.startsWith("\"")) { // keep datatype literal strings unchanged - return vLog4jConstantName; - } else if (vLog4jConstantName.contains(":")) { // enclose IRIs with < > - return "<" + vLog4jConstantName + ">"; + public static String getVLogNameForConstantName(String rulewerkConstantName) { + if (rulewerkConstantName.startsWith("\"")) { // keep datatype literal strings unchanged + return rulewerkConstantName; + } else if (rulewerkConstantName.contains(":")) { // enclose IRIs with < > + return "<" + rulewerkConstantName + ">"; } else { // keep relative IRIs unchanged - return vLog4jConstantName; + return rulewerkConstantName; } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index 66f88d4bb..bfc8ab05a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -55,9 +55,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -642,7 +642,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { try { load(); - } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/vlog4j/issues/128 + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 throw new RuntimeException(e); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index d6889aae4..131d5a712 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -103,7 +103,7 @@ public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationExce @Test public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { - String prefix = "vlog4j_generated_"; + String prefix = "rulewerk_generated_"; prefixDeclarations.setPrefixIri(prefix + "0:", BASE + "generated/"); prefixDeclarations.setPrefixIri("eg:", BASE); prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); @@ -127,7 +127,7 @@ public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPref prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); assertEquals(BASE, this.prefixDeclarations.getPrefixIri("eg:")); - assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("vlog4j_generated_0:")); + assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("rulewerk_generated_0:")); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java index 0ee0ddcce..0f67a6fa5 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogToModelConverterTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,44 +35,44 @@ public class VLogToModelConverterTest { @Test public void testAbstractConstantConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); - final Term vLog4jTerm = new AbstractConstantImpl("c"); + final Term rulewerkTerm = new AbstractConstantImpl("c"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void testAbstractConstantIriConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, ""); - final Term vLog4jTerm = new AbstractConstantImpl("http://example.org/test"); + final Term rulewerkTerm = new AbstractConstantImpl("http://example.org/test"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void testDatatypeConstantConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"a\"^^"); - final Term vLog4jTerm = new DatatypeConstantImpl("a", "http://example.org/test"); + final Term rulewerkTerm = new DatatypeConstantImpl("a", "http://example.org/test"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void testLanguageStringConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "\"Test\"@en"); - final Term vLog4jTerm = new LanguageStringConstantImpl("Test", "en"); + final Term rulewerkTerm = new LanguageStringConstantImpl("Test", "en"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test public void 
testNamedNullConversion() { final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_123"); - final Term vLog4jTerm = new NamedNullImpl("_123"); + final Term rulewerkTerm = new NamedNullImpl("_123"); final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); - assertEquals(vLog4jTerm, convertedTerm); + assertEquals(rulewerkTerm, convertedTerm); } @Test(expected = IllegalArgumentException.class) diff --git a/rulewerk-examples/README.md b/rulewerk-examples/README.md index b76702fd9..0bc31121e 100644 --- a/rulewerk-examples/README.md +++ b/rulewerk-examples/README.md @@ -1,6 +1,6 @@ -This project contains examples of different use-cases of **vlog4j** functionality. -- reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : *SkolemVsRestrictedChaseTermination.java* -- adding facts from a **CSV file**; exporting query results to CSV: *AddDataFromCSVFile.java* -- adding facts from the result of a **SPARQL query** on a remote endpoint: *AddDataFromSparqlQueryResults.java* -- converting an **OWL ontology** into rules and facts; reasoning on an **OWL ontology** : *owlapi.OwlOntologyToRulesAndFacts.java* -- converting an **RDF resource** into facts: *rdf.AddDataFromRDFModel.java* +This project contains examples of different use-cases of **rulewerk** functionality. 
+- reasoning termination for various algorithms - **The Skolem** and **The Restricted Chase** : *SkolemVsRestrictedChaseTermination.java* +- adding facts from a **CSV file**; exporting query results to CSV: *AddDataFromCSVFile.java* +- adding facts from the result of a **SPARQL query** on a remote endpoint: *AddDataFromSparqlQueryResults.java* +- converting an **OWL ontology** into rules and facts; reasoning on an **OWL ontology** : *owlapi.OwlOntologyToRulesAndFacts.java* +- converting an **RDF resource** into facts: *rdf.AddDataFromRDFModel.java* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java index 89cadd193..28e870fa6 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,10 +29,10 @@ import org.semanticweb.rulewerk.parser.RuleParser; /** - * This example demonstrates the basic usage of VLog4j for rule reasoning. We + * This example demonstrates the basic usage of Rulewerk for rule reasoning. We * are using a fixed set of rules and facts defined in Java without any external * sources, and we query for some of the results. 
- * + * * @author Markus Kroetzsch * */ diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java index 646b65f5a..2dc50ca84 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,7 +31,7 @@ import org.semanticweb.rulewerk.core.reasoner.Reasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; import fr.lirmm.graphik.graal.api.core.Atom; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; @@ -41,14 +41,14 @@ /** * This example shows how facts can be imported from files in the * DLGP/DLP format. - * + * * The Graal * {@link DlgpParser} is used to parse the program. This step requires a * {@link File}, {@link InputStream}, {@link Reader}, or {@link String} * containing or pointing to the program. - * + * * The {@link Atom Atoms}, {@link Rule Rules}, and {@link ConjunctiveQuery - * ConjunctiveQueries} are then converted for use by VLog4J. Take care to add + * ConjunctiveQueries} are then converted for use by Rulewerk. 
Take care to add * the rules resulting from the {@link ConjunctiveQuery ConjunctiveQueries} as * well as the {@link Rule Rules} to the {@link Reasoner}; see * {@link GraalConjunctiveQueryToRule} for details. @@ -66,7 +66,7 @@ public static void main(final String[] args) throws IOException { /* * 1. Parse the DLGP/DLP file using the DlgpParser. - * + * * DlgpParser supports Files, InputStreams, Readers, and Strings. While other * objects such as prefixes can also be part of the iterator, they are * automatically resolved and do not need to be handled here. @@ -86,14 +86,14 @@ public static void main(final String[] args) throws IOException { /* * 2. ConjunctiveQueries consist of a conjunction of literals and a set of - * answer variables. To query this with VLog4J, an additional rule needs to be + * answer variables. To query this with Rulewerk, an additional rule needs to be * added for each ConjunctiveQuery. See GraalConjunctiveQueryToRule for details. */ final List convertedConjunctiveQueries = new ArrayList<>(); for (final ConjunctiveQuery conjunctiveQuery : graalConjunctiveQueries) { final String queryUniqueId = "query" + convertedConjunctiveQueries.size(); - convertedConjunctiveQueries.add(GraalToVLog4JModelConverter.convertQuery(queryUniqueId, conjunctiveQuery)); + convertedConjunctiveQueries.add(GraalToRulewerkModelConverter.convertQuery(queryUniqueId, conjunctiveQuery)); } /* @@ -107,7 +107,7 @@ public static void main(final String[] args) throws IOException { /* * Add facts to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + kb.addStatements(GraalToRulewerkModelConverter.convertAtomsToFacts(graalAtoms)); /* * Load the knowledge base into the reasoner */ @@ -120,7 +120,7 @@ public static void main(final String[] args) throws IOException { /* * Add rules to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + 
kb.addStatements(GraalToRulewerkModelConverter.convertRules(graalRules)); for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { kb.addStatement(graalConjunctiveQueryToRule.getRule()); } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java index 6000043b8..b4f05470f 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -30,7 +30,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; import org.semanticweb.rulewerk.examples.ExamplesUtils; import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; -import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; @@ -39,10 +39,10 @@ * This example shows how facts and rules can be imported from objects of the * Graal library. Special * care must be taken with the import of Graal {@link ConjunctiveQuery}-objects, - * since unlike with VLog4J, they represent both the query atom and the + * since unlike with Rulewerk, they represent both the query atom and the * corresponding rule. *

        - * In VLog4J, the reasoner is queried by a query Atom and the results are all + * In Rulewerk, the reasoner is queried by a query Atom and the results are all * facts matching this query Atom.
        * Answering a Graal {@link ConjunctiveQuery} over a certain knowledge base is * equivalent to adding a {@link Rule} to the knowledge base, prior to @@ -53,7 +53,7 @@ * as a a query Atom to obtain the results of the Graal * {@link ConjunctiveQuery}. *

        - * + * * @author Adrian Bielefeldt * */ @@ -67,7 +67,7 @@ public static void main(final String[] args) throws IOException { /* * 1.1 Rules to map external database (EDB) predicates to internal database - * predicates (IDB). Necessary because VLog4J requires separation between input + * predicates (IDB). Necessary because Rulewerk requires separation between input * predicates and predicates for which additional facts can be derived. */ graalRules.add(DlgpParser.parseRule("bicycleIDB(X) :- bicycleEDB(X).")); @@ -114,7 +114,7 @@ public static void main(final String[] args) throws IOException { * then querying with query(?b, ?w) The rule from convertedGraalConjunctiveQuery * needs to be added to the reasoner. */ - final GraalConjunctiveQueryToRule convertedGraalConjunctiveQuery = GraalToVLog4JModelConverter.convertQuery( + final GraalConjunctiveQueryToRule convertedGraalConjunctiveQuery = GraalToRulewerkModelConverter.convertQuery( "graalQuery", DlgpParser.parseQuery("?(B, W) :- bicycleIDB(B), wheelIDB(W), isPartOfIDB(W, B).")); /* @@ -128,7 +128,7 @@ public static void main(final String[] args) throws IOException { /* * Add facts to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertAtomsToFacts(graalAtoms)); + kb.addStatements(GraalToRulewerkModelConverter.convertAtomsToFacts(graalAtoms)); /* * Load the knowledge base into the reasoner */ @@ -143,7 +143,7 @@ public static void main(final String[] args) throws IOException { /* * Add rules to the reasoner knowledge base */ - kb.addStatements(GraalToVLog4JModelConverter.convertRules(graalRules)); + kb.addStatements(GraalToRulewerkModelConverter.convertRules(graalRules)); kb.addStatements(convertedGraalConjunctiveQuery.getRule()); /* diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 774b10265..6c1e9f19d 100644 --- 
a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,7 +38,7 @@ import org.semanticweb.rulewerk.core.reasoner.implementation.VLogReasoner; import org.semanticweb.rulewerk.examples.DoidExample; import org.semanticweb.rulewerk.examples.ExamplesUtils; -import org.semanticweb.rulewerk.graal.GraalToVLog4JModelConverter; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; import fr.lirmm.graphik.graal.io.dlp.DlgpParser; @@ -100,7 +100,7 @@ public static void main(final String[] args) throws IOException { final Object object = parser.next(); if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { kb.addStatement( - GraalToVLog4JModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); + GraalToRulewerkModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); } } } diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java index dad6f26f1..5bcb6bea0 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the 
"License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -43,10 +43,10 @@ import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; /** - * This example shows how vlog4j-owlapi library (class + * This example shows how rulewerk-owlapi library (class * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into - * vlog4j-core {@link Rule}s and {@link Fact}s. - * + * rulewerk-core {@link Rule}s and {@link Fact}s. + * * @author Irina Dragoste * */ @@ -60,7 +60,7 @@ public static void main(final String[] args) throws OWLOntologyCreationException .loadOntologyFromOntologyDocument(new File(ExamplesUtils.INPUT_FOLDER + "owl/bike.owl")); /* - * vlog4j.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in + * rulewerk.owlapi.OwlToRulesConverter can be used to convert the OWL axiom in * source ontology to target Rule and Atom objects */ final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(); @@ -105,7 +105,7 @@ public static void main(final String[] args) throws OWLOntologyCreationException /* * See that an unnamed individual has been introduced to satisfy * owl:someValuesFrom restriction: - * + * * :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . 
*/ diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java index 251f89777..77af29d19 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -52,10 +52,10 @@ import org.semanticweb.rulewerk.rdf.RdfModelConverter; /** - * This example shows how vlog4j-rdf library's utility class + * This example shows how rulewerk-rdf library's utility class * {@link RdfModelConverter} can be used to convert RDF {@link Model}s from - * various types of RDF resources to vlog4j-core {@code Atom} sets. - * + * various types of RDF resources to rulewerk-core {@code Atom} sets. + * * @author Irina Dragoste * */ @@ -77,7 +77,7 @@ public static void main(final String[] args) RDFFormat.RDFXML); /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". 
*/ final Set tripleFactsISWC2016 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2016); @@ -96,7 +96,7 @@ public static void main(final String[] args) RDFFormat.TURTLE); /* - * Using vlog4j-rdf library, we convert RDF Model triples to facts, each having + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each having * the ternary predicate "TRIPLE". */ final Set tripleFactsISWC2017 = RdfModelConverter.rdfModelToFacts(rdfModelISWC2017); @@ -156,7 +156,7 @@ public static void main(final String[] args) /** * Parses the data from the supplied InputStream, using the supplied baseURI to * resolve any relative URI references. - * + * * @param inputStream The content to be parsed, expected to be in the given * {@code rdfFormat}. * @param baseURI The URI associated with the data in the InputStream. diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java index 0c077263e..a99563a3f 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,14 +20,16 @@ * #L% */ +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + /** - * An exception to signify that a conversion from Graal data structures to VLog4J data structures could not + * An exception to signify that a conversion from Graal data structures to Rulewerk data structures could not * be made. - * + * * @author Adrian Bielefeldt * */ -public class GraalConvertException extends RuntimeException { +public class GraalConvertException extends RulewerkRuntimeException { /** * generated serial version UID diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java similarity index 87% rename from rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java rename to rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java index f23cb6042..5ab82d428 100644 --- a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverter.java +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -43,28 +43,28 @@ /** * Utility class to convert * Graal data structures into - * VLog4J data structures. Labels ({@link ConjunctiveQuery#getLabel()}, + * Rulewerk data structures. 
Labels ({@link ConjunctiveQuery#getLabel()}, * {@link fr.lirmm.graphik.graal.api.core.Rule#getLabel() Rule.getLabel()}, or * {@link fr.lirmm.graphik.graal.api.core.Term#getLabel() Term.getLabel()}) are - * not converted since VLog4J does not support them. + * not converted since Rulewerk does not support them. * * @author Adrian Bielefeldt * */ -public final class GraalToVLog4JModelConverter { +public final class GraalToRulewerkModelConverter { - private GraalToVLog4JModelConverter() { + private GraalToRulewerkModelConverter() { } /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a - * {@link PositiveLiteral VLog4J PositiveLiteral}. + * {@link PositiveLiteral Rulewerk PositiveLiteral}. * * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom * Graal Atom} * @param existentialVariables set of variables that are existentially * quantified - * @return A {@link PositiveLiteral VLog4J PositiveLiteral} + * @return A {@link PositiveLiteral Rulewerk PositiveLiteral} */ public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core.Atom atom, final Set existentialVariables) { @@ -75,10 +75,10 @@ public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core. /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a - * {@link Fact VLog4J fact}. + * {@link Fact Rulewerk fact}. * * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} - * @return A {@link Fact VLog4J fact} + * @return A {@link Fact Rulewerk fact} * @throws IllegalArgumentException if the converted atom contains terms that * cannot occur in facts */ @@ -90,12 +90,12 @@ public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom /** * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Atom Graal - * Atoms} into a {@link List} of {@link PositiveLiteral VLog4J + * Atoms} into a {@link List} of {@link PositiveLiteral Rulewerk * PositiveLiterals}. 
* * @param atoms list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal * Atoms}. - * @return A {@link List} of {@link PositiveLiteral VLog4J PositiveLiterals}. + * @return A {@link List} of {@link PositiveLiteral Rulewerk PositiveLiterals}. */ public static List convertAtoms(final List atoms) { final List result = new ArrayList<>(); @@ -107,11 +107,11 @@ public static List convertAtoms(final List convertAtomsToFacts(final List atoms) { final List result = new ArrayList<>(); @@ -122,13 +122,13 @@ public static List convertAtomsToFacts(final List convertAtomSet(final AtomSet atomSet, final Set existentialVariables) { @@ -146,11 +146,11 @@ private static Conjunction convertAtomSet(final AtomSet atomSet /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Predicate Graal Predicate} - * into a {@link Predicate VLog4J Predicate}. + * into a {@link Predicate Rulewerk Predicate}. * * @param predicate A {@link fr.lirmm.graphik.graal.api.core.Predicate Graal * Predicate} - * @return A {@link Predicate VLog4J Predicate} + * @return A {@link Predicate Rulewerk Predicate} */ private static Predicate convertPredicate(final fr.lirmm.graphik.graal.api.core.Predicate predicate) { return Expressions.makePredicate(predicate.getIdentifier().toString(), predicate.getArity()); @@ -190,12 +190,12 @@ public static GraalConjunctiveQueryToRule convertQuery(final String ruleHeadPred if (conjunctiveQuery.getAtomSet().isEmpty()) { throw new GraalConvertException(MessageFormat.format( - "Graal ConjunctiveQuery {0} with empty body is not supported in VLog4j.", conjunctiveQuery)); + "Graal ConjunctiveQuery {0} with empty body is not supported in Rulewerk.", conjunctiveQuery)); } if (conjunctiveQuery.getAnswerVariables().isEmpty()) { throw new GraalConvertException(MessageFormat.format( - "Graal ConjunctiveQuery {0} with no answer variables is not supported in VLog4J.", + "Graal ConjunctiveQuery {0} with no answer variables is not supported in Rulewerk.", conjunctiveQuery)); } @@ -208,10 
+208,10 @@ public static GraalConjunctiveQueryToRule convertQuery(final String ruleHeadPred /** * Converts a {@link fr.lirmm.graphik.graal.api.core.Rule Graal Rule} into a - * {@link Rule Vlog4J Rule}. + * {@link Rule Rulewerk Rule}. * * @param rule A {@link fr.lirmm.graphik.graal.api.core.Rule Graal Rule}. - * @return A {@link Rule Vlog4J Rule}. + * @return A {@link Rule Rulewerk Rule}. */ public static Rule convertRule(final fr.lirmm.graphik.graal.api.core.Rule rule) { final Conjunction head = convertAtomSet(rule.getHead(), rule.getExistentials()); @@ -221,11 +221,11 @@ public static Rule convertRule(final fr.lirmm.graphik.graal.api.core.Rule rule) /** * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Rule Graal - * Rules} into a {@link List} of {@link Rule VLog4J Rules}. + * Rules} into a {@link List} of {@link Rule Rulewerk Rules}. * * @param rules A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Rule * Graal Rules}. - * @return A {@link List} of {@link Rule VLog4J Rules}. + * @return A {@link List} of {@link Rule Rulewerk Rules}. */ public static List convertRules(final List rules) { final List result = new ArrayList<>(); @@ -237,7 +237,7 @@ public static List convertRules(final List convertRules(final List, if it * is a Constant.
        * Graal Variable with identifier "a" will be transformed to - * vlog4j Variable with name "a". Graal Constant with identifier - * "c" will be transformed to vlog4j Constant with name + * rulewerk Variable with name "a". Graal Constant with identifier + * "c" will be transformed to rulewerk Constant with name * "<c>". * * @throws GraalConvertException If the term is neither variable nor constant. @@ -277,11 +277,11 @@ private static Term convertTerm(final fr.lirmm.graphik.graal.api.core.Term term, /** * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Term Graal - * Terms} into a {@link List} of {@link Term VLog4J Terms}. + * Terms} into a {@link List} of {@link Term Rulewerk Terms}. * * @param terms A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Term * Graal Terms} - * @return A {@link List} of {@link Term VLog4J Terms} + * @return A {@link List} of {@link Term Rulewerk Terms} */ private static List convertTerms(final List terms, final Set existentialVariables) { diff --git a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java similarity index 69% rename from rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java rename to rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java index e522de86d..b6c3cd88b 100644 --- a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToVLog4JModelConverterTest.java +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -46,7 +46,7 @@ /** * @author Adrian Bielefeldt */ -public class GraalToVLog4JModelConverterTest { +public class GraalToRulewerkModelConverterTest { private final String socrate = "socrate"; private final String redsBike = "redsBike"; @@ -61,18 +61,18 @@ public class GraalToVLog4JModelConverterTest { private final String y = "Y"; private final String z = "Z"; - private final Constant vlog4j_socrate = Expressions.makeAbstractConstant(this.socrate); + private final Constant rulewerk_socrate = Expressions.makeAbstractConstant(this.socrate); - private final Predicate vlog4j_bicycle = Expressions.makePredicate(this.bicycle, 1); - private final Predicate vlog4j_hasPart = Expressions.makePredicate(this.hasPart, 2); - private final Predicate vlog4j_human = Expressions.makePredicate(this.human, 1); - private final Predicate vlog4j_mortal = Expressions.makePredicate(this.mortal, 1); - private final Predicate vlog4j_wheel = Expressions.makePredicate(this.wheel, 1); + private final Predicate rulewerk_bicycle = Expressions.makePredicate(this.bicycle, 1); + private final Predicate rulewerk_hasPart = Expressions.makePredicate(this.hasPart, 2); + private final Predicate rulewerk_human = Expressions.makePredicate(this.human, 1); + private final Predicate rulewerk_mortal = Expressions.makePredicate(this.mortal, 1); + private final Predicate rulewerk_wheel = Expressions.makePredicate(this.wheel, 1); - private final Variable vlog4j_x = Expressions.makeUniversalVariable(this.x); - private final Variable vlog4j_y = Expressions.makeUniversalVariable(this.y); - private final Variable vlog4j_z = Expressions.makeUniversalVariable(this.z); - private final Variable vlog4j_ex_y = 
Expressions.makeExistentialVariable(this.y); + private final Variable rulewerk_x = Expressions.makeUniversalVariable(this.x); + private final Variable rulewerk_y = Expressions.makeUniversalVariable(this.y); + private final Variable rulewerk_z = Expressions.makeUniversalVariable(this.z); + private final Variable rulewerk_ex_y = Expressions.makeExistentialVariable(this.y); private final DefaultTermFactory termFactory = new DefaultTermFactory(); @@ -98,50 +98,50 @@ public class GraalToVLog4JModelConverterTest { @Test public void testConvertAtom() throws ParseException { - final PositiveLiteral vlog4j_atom = Expressions.makePositiveLiteral(this.vlog4j_human, this.vlog4j_socrate); + final PositiveLiteral rulewerk_atom = Expressions.makePositiveLiteral(this.rulewerk_human, this.rulewerk_socrate); final fr.lirmm.graphik.graal.api.core.Atom graal_atom = new DefaultAtom(this.graal_human, this.graal_socrate); - assertEquals(vlog4j_atom, GraalToVLog4JModelConverter.convertAtom(graal_atom, Collections.emptySet())); + assertEquals(rulewerk_atom, GraalToRulewerkModelConverter.convertAtom(graal_atom, Collections.emptySet())); - final PositiveLiteral vlog4j_atom_2 = Expressions.makePositiveLiteral(this.vlog4j_hasPart, this.vlog4j_x, - this.vlog4j_socrate); + final PositiveLiteral rulewerk_atom_2 = Expressions.makePositiveLiteral(this.rulewerk_hasPart, this.rulewerk_x, + this.rulewerk_socrate); final fr.lirmm.graphik.graal.api.core.Atom graal_atom_2 = new DefaultAtom(this.graal_hasPart, this.graal_x, this.graal_socrate); - assertEquals(vlog4j_atom_2, GraalToVLog4JModelConverter.convertAtom(graal_atom_2, Collections.emptySet())); + assertEquals(rulewerk_atom_2, GraalToRulewerkModelConverter.convertAtom(graal_atom_2, Collections.emptySet())); } @Test public void testConvertFact() throws ParseException { - final Fact vlog4j_atom = Expressions.makeFact(this.vlog4j_human, Arrays.asList(this.vlog4j_socrate)); + final Fact rulewerk_atom = Expressions.makeFact(this.rulewerk_human, 
Arrays.asList(this.rulewerk_socrate)); final fr.lirmm.graphik.graal.api.core.Atom graal_atom = new DefaultAtom(this.graal_human, this.graal_socrate); - assertEquals(vlog4j_atom, GraalToVLog4JModelConverter.convertAtomToFact(graal_atom)); + assertEquals(rulewerk_atom, GraalToRulewerkModelConverter.convertAtomToFact(graal_atom)); } @Test public void testConvertRule() throws ParseException { // moral(X) :- human(X) - final PositiveLiteral vlog4j_mortal_atom = Expressions.makePositiveLiteral(this.vlog4j_mortal, this.vlog4j_x); - final PositiveLiteral vlog4j_human_atom = Expressions.makePositiveLiteral(this.vlog4j_human, this.vlog4j_x); - final Rule vlog4j_rule = Expressions.makeRule(vlog4j_mortal_atom, vlog4j_human_atom); + final PositiveLiteral rulewerk_mortal_atom = Expressions.makePositiveLiteral(this.rulewerk_mortal, this.rulewerk_x); + final PositiveLiteral rulewerk_human_atom = Expressions.makePositiveLiteral(this.rulewerk_human, this.rulewerk_x); + final Rule rulewerk_rule = Expressions.makeRule(rulewerk_mortal_atom, rulewerk_human_atom); final fr.lirmm.graphik.graal.api.core.Atom graal_mortal_atom = new DefaultAtom(this.graal_mortal, this.graal_x); final fr.lirmm.graphik.graal.api.core.Atom graal_human_atom = new DefaultAtom(this.graal_human, this.graal_x); final fr.lirmm.graphik.graal.api.core.Rule graal_rule = new DefaultRule(new LinkedListAtomSet(graal_human_atom), new LinkedListAtomSet(graal_mortal_atom)); - assertEquals(vlog4j_rule, GraalToVLog4JModelConverter.convertRule(graal_rule)); + assertEquals(rulewerk_rule, GraalToRulewerkModelConverter.convertRule(graal_rule)); } @Test public void testConvertExistentialRule() throws ParseException { // hasPart(X, Y), wheel(Y) :- bicycle(X) - final PositiveLiteral vlog4j_hasPart_atom = Expressions.makePositiveLiteral(this.vlog4j_hasPart, this.vlog4j_x, - this.vlog4j_ex_y); - final PositiveLiteral vlog4j_wheel_atom = Expressions.makePositiveLiteral(this.vlog4j_wheel, this.vlog4j_ex_y); - final PositiveLiteral 
vlog4j_bicycle_atom = Expressions.makePositiveLiteral(this.vlog4j_bicycle, this.vlog4j_x); - final Rule vlog4j_rule = Expressions.makeRule( - Expressions.makePositiveConjunction(vlog4j_hasPart_atom, vlog4j_wheel_atom), - Expressions.makeConjunction(vlog4j_bicycle_atom)); + final PositiveLiteral rulewerk_hasPart_atom = Expressions.makePositiveLiteral(this.rulewerk_hasPart, this.rulewerk_x, + this.rulewerk_ex_y); + final PositiveLiteral rulewerk_wheel_atom = Expressions.makePositiveLiteral(this.rulewerk_wheel, this.rulewerk_ex_y); + final PositiveLiteral rulewerk_bicycle_atom = Expressions.makePositiveLiteral(this.rulewerk_bicycle, this.rulewerk_x); + final Rule rulewerk_rule = Expressions.makeRule( + Expressions.makePositiveConjunction(rulewerk_hasPart_atom, rulewerk_wheel_atom), + Expressions.makeConjunction(rulewerk_bicycle_atom)); final fr.lirmm.graphik.graal.api.core.Atom graal_hasPart_atom = new DefaultAtom(this.graal_hasPart, this.graal_x, this.graal_y); @@ -151,7 +151,7 @@ public void testConvertExistentialRule() throws ParseException { final fr.lirmm.graphik.graal.api.core.Rule graal_rule = new DefaultRule( new LinkedListAtomSet(graal_bicycle_atom), new LinkedListAtomSet(graal_hasPart_atom, graal_wheel_atom)); - assertEquals(vlog4j_rule, GraalToVLog4JModelConverter.convertRule(graal_rule)); + assertEquals(rulewerk_rule, GraalToRulewerkModelConverter.convertRule(graal_rule)); } @Test @@ -159,16 +159,16 @@ public void testConvertQuery() throws ParseException { // ?(X) :- mortal(X) final String mortalQuery = "mortalQuery"; final PositiveLiteral query = Expressions.makePositiveLiteral(Expressions.makePredicate(mortalQuery, 1), - this.vlog4j_x); + this.rulewerk_x); final Rule queryRule = Expressions.makeRule(query, - Expressions.makePositiveLiteral(this.vlog4j_mortal, this.vlog4j_x)); + Expressions.makePositiveLiteral(this.rulewerk_mortal, this.rulewerk_x)); final fr.lirmm.graphik.graal.api.core.Atom graal_query_atom = new DefaultAtom(this.graal_mortal, 
this.graal_x); final ConjunctiveQuery graal_query = new DefaultConjunctiveQuery(new LinkedListAtomSet(graal_query_atom), Arrays.asList(this.graal_x)); - final GraalConjunctiveQueryToRule importedQuery = GraalToVLog4JModelConverter.convertQuery(mortalQuery, + final GraalConjunctiveQueryToRule importedQuery = GraalToRulewerkModelConverter.convertQuery(mortalQuery, graal_query); assertEquals(query, importedQuery.getQuery()); assertEquals(queryRule, importedQuery.getRule()); @@ -204,21 +204,21 @@ public void testConvertQuery() throws ParseException { graal_predicate4_atom), Arrays.asList(this.graal_x, this.graal_x, this.graal_y)); - final GraalConjunctiveQueryToRule importedComplexQuery = GraalToVLog4JModelConverter.convertQuery(complexQuery, + final GraalConjunctiveQueryToRule importedComplexQuery = GraalToRulewerkModelConverter.convertQuery(complexQuery, graal_complex_query); final PositiveLiteral expectedComplexQueryAtom = Expressions.makePositiveLiteral( - Expressions.makePredicate(complexQuery, 3), this.vlog4j_x, this.vlog4j_x, this.vlog4j_y); - final PositiveLiteral vlog4j_predicate1_atom = Expressions - .makePositiveLiteral(Expressions.makePredicate(predicate1, 1), this.vlog4j_x); - final PositiveLiteral vlog4j_predicate2_atom = Expressions - .makePositiveLiteral(Expressions.makePredicate(predicate2, 2), this.vlog4j_y, this.vlog4j_x); - final PositiveLiteral vlog4j_predicate3_atom = Expressions.makePositiveLiteral( - Expressions.makePredicate(predicate3, 2), this.vlog4j_y, Expressions.makeAbstractConstant(stockholm)); - final PositiveLiteral vlog4j_predicate4_atom = Expressions.makePositiveLiteral( - Expressions.makePredicate(predicate4, 3), this.vlog4j_x, this.vlog4j_y, this.vlog4j_z); - final Rule expectedComplexQueryRule = Expressions.makeRule(expectedComplexQueryAtom, vlog4j_predicate1_atom, - vlog4j_predicate2_atom, vlog4j_predicate3_atom, vlog4j_predicate4_atom); + Expressions.makePredicate(complexQuery, 3), this.rulewerk_x, this.rulewerk_x, 
this.rulewerk_y); + final PositiveLiteral rulewerk_predicate1_atom = Expressions + .makePositiveLiteral(Expressions.makePredicate(predicate1, 1), this.rulewerk_x); + final PositiveLiteral rulewerk_predicate2_atom = Expressions + .makePositiveLiteral(Expressions.makePredicate(predicate2, 2), this.rulewerk_y, this.rulewerk_x); + final PositiveLiteral rulewerk_predicate3_atom = Expressions.makePositiveLiteral( + Expressions.makePredicate(predicate3, 2), this.rulewerk_y, Expressions.makeAbstractConstant(stockholm)); + final PositiveLiteral rulewerk_predicate4_atom = Expressions.makePositiveLiteral( + Expressions.makePredicate(predicate4, 3), this.rulewerk_x, this.rulewerk_y, this.rulewerk_z); + final Rule expectedComplexQueryRule = Expressions.makeRule(expectedComplexQueryAtom, rulewerk_predicate1_atom, + rulewerk_predicate2_atom, rulewerk_predicate3_atom, rulewerk_predicate4_atom); assertEquals(expectedComplexQueryAtom, importedComplexQuery.getQuery()); @@ -232,7 +232,7 @@ public void testConvertQueryExceptionNoVariables() { this.graal_socrate); final ConjunctiveQuery graal_query_without_answer_variables = new DefaultConjunctiveQuery( new LinkedListAtomSet(graal_atom), new ArrayList<>()); - GraalToVLog4JModelConverter.convertQuery("name", graal_query_without_answer_variables); + GraalToRulewerkModelConverter.convertQuery("name", graal_query_without_answer_variables); } @Test(expected = GraalConvertException.class) @@ -240,7 +240,7 @@ public void testConvertQueryExceptionEmptyBody() { final ConjunctiveQuery graal_query_without_body = new DefaultConjunctiveQuery(new LinkedListAtomSet(), Arrays.asList(this.graal_y)); - GraalToVLog4JModelConverter.convertQuery("name", graal_query_without_body); + GraalToRulewerkModelConverter.convertQuery("name", graal_query_without_body); } @Test(expected = GraalConvertException.class) @@ -252,6 +252,6 @@ public void testConvertQueryExceptionBlankPredicate() { final ConjunctiveQuery graal_query = new DefaultConjunctiveQuery( new 
LinkedListAtomSet(graal_atom_1, graal_atom_2), Arrays.asList(this.graal_z)); - GraalToVLog4JModelConverter.convertQuery(" ", graal_query); + GraalToRulewerkModelConverter.convertQuery(" ", graal_query); } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java index ee2a687f1..5843f1db7 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -78,7 +78,7 @@ public class ParserConfiguration { * production of the rules grammar, corresponding to some {@link DataSource} * type. * - * @see + * @see * the grammar. * * @param name Name of the data source, as it appears in the declaring diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java index abca75072..802cafe03 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,9 +20,9 @@ * #L% */ -import org.semanticweb.rulewerk.core.exceptions.VLog4jException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; -public class ParsingException extends VLog4jException { +public class ParsingException extends RulewerkException { private static final long serialVersionUID = 2849123381757026724L; public ParsingException(String message) { diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj index b630085bb..0d633b1ad 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj @@ -1,8 +1,8 @@ /*- * #%L - * vlog4j-parser + * rulewerk-parser * %% - * Copyright (C) 2018 - 2020 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
From a7ad0ab286d53d85eb69768030e9688c162bf404 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 15:41:40 +0100 Subject: [PATCH 0821/1255] Make sure all exceptions extend the appropriate superclass Fixup --- .../exceptions/RulewerkRuntimeException.java | 8 ++ .../core/model/implementation/Serializer.java | 13 ++-- .../reasoner/implementation/VLogReasoner.java | 73 ++++++++++--------- .../implementation/VLogReasonerNegation.java | 7 +- .../VLogReasonerSparqlInput.java | 17 +++-- .../rulewerk/examples/ExamplesUtils.java | 9 ++- .../owlapi/OwlAxiomToRulesConverter.java | 31 ++++---- .../OwlFeatureNotSupportedException.java | 14 ++-- .../owlapi/OwlToRulesConversionHelper.java | 35 +++++---- .../rulewerk/rdf/RdfValueToTermConverter.java | 7 +- 10 files changed, 112 insertions(+), 102 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java index 609e0f882..9ff8dca3a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java @@ -33,6 +33,10 @@ public class RulewerkRuntimeException extends RuntimeException { */ private static final long serialVersionUID = -6574826887294416900L; + public RulewerkRuntimeException(Throwable cause) { + super(cause); + } + public RulewerkRuntimeException(String message, Throwable cause) { super(message, cause); } @@ -40,4 +44,8 @@ public RulewerkRuntimeException(String message, Throwable cause) { public RulewerkRuntimeException(String message) { super(message); } + + public RulewerkRuntimeException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } } diff --git 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index bbccf9e1b..0a2668cd9 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -1,12 +1,5 @@ package org.semanticweb.rulewerk.core.model.implementation; -import java.util.List; -import java.util.Map.Entry; -import java.util.function.Function; - -import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; -import org.semanticweb.rulewerk.core.model.api.AbstractConstant; - /*- * #%L * Rulewerk Core Components @@ -27,8 +20,12 @@ * #L% */ +import java.util.List; +import java.util.Map.Entry; +import java.util.function.Function; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Conjunction; -import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java index bfc8ab05a..00b774ef0 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasoner.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; @@ -12,6 +32,7 @@ import org.apache.commons.lang3.Validate; import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -46,26 +67,6 @@ import karmaresearch.vlog.VLog; import karmaresearch.vlog.VLog.CyclicCheckResult; -/* - * #%L - * Rulewerk Core Components - * %% - * Copyright (C) 2018 - 2020 Rulewerk Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - /** * Reasoner implementation using the VLog backend. 
* @@ -205,9 +206,9 @@ void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { try { this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); + throw new RulewerkRuntimeException("Invalid data sources configuration.", e); } } @@ -224,7 +225,7 @@ void loadInMemoryDataSource(final DataSource dataSource, final Predicate predica try { load(predicate, inMemoryDataSource); } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); + throw new RulewerkRuntimeException("Invalid data sources configuration!", e); } } } @@ -279,7 +280,7 @@ void validateDataSourcePredicateArity(Predicate predicate, DataSource dataSource throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } } @@ -299,7 +300,7 @@ void loadFacts(final VLogKnowledgeBase vLogKB) { } } } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration!", e); + throw new RulewerkRuntimeException("Invalid data sources configuration!", e); } }); @@ -317,7 +318,7 @@ void loadRules(final VLogKnowledgeBase vLogKB) { } } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } } @@ -361,11 +362,11 @@ private void runChase() { this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); } } catch (final NotStartedException e) 
{ - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final MaterializationException e) { // FIXME: the message generated here is not guaranteed to be the correct // interpretation of the exception that is caught - throw new RuntimeException( + throw new RulewerkRuntimeException( "Knowledge base incompatible with stratified negation: either the Rules are not stratifiable, or the variables in negated atom cannot be bound.", e); } @@ -390,7 +391,7 @@ public QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNul try { stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. Answer must be empty!"); @@ -412,7 +413,7 @@ public QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean include try { result = this.vLog.querySize(vLogAtom, true, filterBlanks); } catch (NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (NonExistingPredicateException e) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. 
Answer must be empty!"); @@ -435,7 +436,7 @@ public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final St try { this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state!", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); } catch (final NonExistingPredicateException e1) { LOGGER.warn("Query uses predicate " + query.getPredicate() + " that does not occur in the knowledge base. Answers are therefore empty."); @@ -475,9 +476,9 @@ public Correctness writeInferences(OutputStream stream) throws IOException { .getBytes()); } } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); } catch (final NonExistingPredicateException e1) { - throw new RuntimeException("Inconsistent knowledge base state.", e1); + throw new RulewerkRuntimeException("Inconsistent knowledge base state.", e1); } } @@ -562,7 +563,7 @@ public boolean isMFC() { try { checkCyclic = this.vLog.checkCyclic("MFC"); } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible + throw new RulewerkRuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.CYCLIC); } @@ -643,7 +644,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { try { load(); } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 - throw new RuntimeException(e); + throw new RulewerkRuntimeException(e); } } @@ -651,7 +652,7 @@ private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { try { checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); } catch (final NotStartedException e) { - throw new RuntimeException(e.getMessage(), e); // should be impossible + throw new 
RulewerkRuntimeException(e.getMessage(), e); // should be impossible } return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java index ecbc3ab07..39f802725 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerNegation.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -28,6 +28,7 @@ import java.util.Arrays; import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Literal; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -59,7 +60,7 @@ public class VLogReasonerNegation { private final Fact pEF = Expressions.makeFact("P", Arrays.asList(e, f)); private final Fact qCD = Expressions.makeFact("Q", Arrays.asList(c, d)); - @Test(expected = RuntimeException.class) + @Test(expected = RulewerkRuntimeException.class) public void testNotStratifiable() throws IOException { final PositiveLiteral qXY = Expressions.makePositiveLiteral("Q", x, y); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java index b95c1b005..238d488a1 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerSparqlInput.java @@ -1,8 +1,5 @@ package org.semanticweb.rulewerk.core.reasoner.implementation; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - /*- * #%L * Rulewerk Core Components @@ -12,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -23,6 +20,9 @@ * #L% */ +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.net.URL; import java.util.Arrays; @@ -31,6 +31,7 @@ import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.QueryResult; import org.semanticweb.rulewerk.core.model.api.Variable; @@ -43,7 +44,7 @@ public class VLogReasonerSparqlInput { /** * Tests the query "SELECT ?b ?a WHERE {?a p:P22 ?b}" - * + * * @throws ReasonerStateException * @throws EdbIdbSeparationException * @throws IOException @@ -104,7 +105,7 @@ public void testSimpleSparqlQueryHttps() throws IOException { /** * Tests the query "SELECT ?b ?a WHERE {?a p:P22 ?b .}" - * + * * @throws ReasonerStateException * @throws EdbIdbSeparationException * @throws IOException @@ -136,7 +137,7 @@ public void testSimpleSparqlQuery2() throws IOException { } @Ignore // Ignored during CI because it makes lengthy calls to remote servers - @Test(expected = RuntimeException.class) + @Test(expected = RulewerkRuntimeException.class) public void testConjunctiveQueryNewLineCharacterInQueryBody() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet queryVariables = new LinkedHashSet<>( diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java index f379970ba..5149abc41 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you 
may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,6 +29,7 @@ import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.implementation.Expressions; @@ -58,7 +59,7 @@ private ExamplesUtils() { * restrict the logging messages that are shown on the console or to change * their formatting. See the documentation of Log4J for details on how to do * this. - * + * * Note: The VLog C++ backend performs its own logging. The log-level for this * can be configured using * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. 
@@ -107,7 +108,7 @@ public static void printOutQueryAnswers(final String queryString, final Reasoner final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); printOutQueryAnswers(query, reasoner); } catch (final ParsingException e) { - throw new RuntimeException(e.getMessage(), e); + throw new RulewerkRuntimeException(e.getMessage(), e); } } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 6564a0a8c..2f10b7099 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -1,14 +1,5 @@ package org.semanticweb.rulewerk.owlapi; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import org.semanticweb.owlapi.apibinding.OWLManager; - /*- * #%L * Rulewerk OWL API Support @@ -18,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -29,6 +20,14 @@ * #L% */ +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLAxiomVisitor; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; @@ -127,7 +126,7 @@ Variable getFreshExistentialVariable() { * simply dropped. Formulas that have only positive atoms (empty body) are * transformed into one or more facts. All other cases lead to a single rule * being added. - * + * * @param converter */ void addRule(final AbstractClassToRuleConverter converter) { @@ -167,7 +166,7 @@ Term replaceTerm(Term term, Term oldTerm, Term newTerm) { PositiveLiteralImpl makeTermReplacedLiteral(Literal literal, Term oldTerm, Term newTerm) { if (literal.isNegated()) { - throw new RuntimeException("Nonmonotonic negation of literals is not handled in OWL conversion."); + throw new OwlFeatureNotSupportedException("Nonmonotonic negation of literals is not handled in OWL conversion."); } return new PositiveLiteralImpl(literal.getPredicate(), literal.getTerms().map(term -> replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); @@ -178,12 +177,12 @@ PositiveLiteralImpl makeTermReplacedLiteral(Literal literal, Term oldTerm, Term * rules are renamings of class expressions, based on auxiliary class names * (unary predicates). The given term is the term used in this auxiliary * predicate. - * + * * Variables used in auxiliary atoms can be existentially quantified, but the * corresponding variable in auxiliary rules must always be universally * quantified. Therefore, if the given term is an existential variable, the * method will replace it by a universal one of the same name. 
- * + * * @param head * @param body * @param auxTerm @@ -216,7 +215,7 @@ void startAxiomConversion() { * buffers, and finally creating a rule from the collected body and head. The * conversions may lead to auxiliary rules being created during processing, so * additional rules besides the one that is added here might be created. - * + * * @param subClass * @param superClass */ diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index 4f1841ebc..06f02adca 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,23 +20,25 @@ * #L% */ +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + /** * Exception that indicates that the translation of OWL into rules has failed * due to an expressive feature of OWL that cannot be captured in rules. - * + * * @author Markus Krötzsch * */ -public class OwlFeatureNotSupportedException extends RuntimeException { +public class OwlFeatureNotSupportedException extends RulewerkRuntimeException { /** - * + * */ private static final long serialVersionUID = -194716185012512419L; /** * Creates a new exception. 
- * + * * @param cause * message explaining the error */ diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index aa02ca07a..43c4c1acb 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -1,14 +1,5 @@ package org.semanticweb.rulewerk.owlapi; -import java.io.UnsupportedEncodingException; -import java.math.BigInteger; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Arrays; -import java.util.Collection; - -import org.semanticweb.owlapi.model.OWLAnonymousIndividual; - /*- * #%L * Rulewerk OWL API Support @@ -18,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,20 +19,28 @@ * limitations under the License. 
* #L% */ +import java.io.UnsupportedEncodingException; +import java.math.BigInteger; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Collection; +import org.semanticweb.owlapi.model.OWLAnonymousIndividual; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; import org.semanticweb.rulewerk.owlapi.AbstractClassToRuleConverter.SimpleConjunction; @@ -49,7 +48,7 @@ /** * Utility class for helper functions that are used to convert OWL API objects * to rules. - * + * * @author Markus Kroetzsch * */ @@ -57,7 +56,7 @@ public class OwlToRulesConversionHelper { /** * Returns a {@link Term} to represent an {@link OWLIndividual} in rules. 
- * + * * @param owlIndividual the individual to get a term for * @return a suitable term */ @@ -74,7 +73,7 @@ public static Term getIndividualTerm(final OWLIndividual owlIndividual) { /** * Returns a {@link Predicate} to represent an {@link OWLClass} in rules. - * + * * @param owlClass the atomic class to get a predicate for * @return a suitable unary predicate */ @@ -85,7 +84,7 @@ public static Predicate getClassPredicate(final OWLClass owlClass) { /** * Returns a {@link Predicate} to represent an {@link OWLObjectProperty} in * rules. - * + * * @param owlObjectProperty the atomic property to get a predicate for * @return a suitable binary predicate */ @@ -104,7 +103,7 @@ public static Predicate getAuxiliaryClassPredicate(final Collection Date: Mon, 2 Mar 2020 15:42:04 +0100 Subject: [PATCH 0822/1255] Fix javadoc --- pom.xml | 2 +- .../model/api/PrefixDeclarationRegistry.java | 8 +-- .../rulewerk/core/reasoner/Reasoner.java | 49 ++++++++++--------- .../LocalPrefixDeclarationRegistry.java | 12 ++++- 4 files changed, 41 insertions(+), 30 deletions(-) diff --git a/pom.xml b/pom.xml index 8ec79cb32..aa884b190 100644 --- a/pom.xml +++ b/pom.xml @@ -315,7 +315,7 @@ ${maven.javadoc.version} 1.8 - Rulewerk homepage]]> + Rulewerk homepage]]> diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java index 1d24f1daa..b9dc7386a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -87,9 +87,9 @@ public interface PrefixDeclarationRegistry extends Iterable @@ -387,11 +388,13 @@ default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { * explicit facts materialised by the reasoner.
        * An answer to the query is the term set of a fact that matches the * {@code query}: the fact predicate is the same as the {@code query} predicate, - * the {@link TermType#CONSTANT} terms of the {@code query} appear in the answer - * fact at the same term position, and the {@link TermType#VARIABLE} terms of - * the {@code query} are matched by terms in the fact, either named - * ({@link TermType#CONSTANT}) or anonymous ({@link TermType#NAMED_NULL}). The - * same variable name identifies the same term in the answer fact.
        + * the {@link TermType#ABSTRACT_CONSTANT}, {@link TermType#DATATYPE_CONSTANT} + * and {@link TermType#LANGSTRING_CONSTANT} terms of the {@code query} appear in + * the answer fact at the same term position, and the + * {@link TermType#UNIVERSAL_VARIABLE} terms of the {@code query} are matched by + * terms in the fact, either named (any of the three constant types) or + * anonymous ({@link TermType#NAMED_NULL}). The same variable name identifies + * the same term in the answer fact.
        * * Depending on the state of the reasoning (materialisation) and its * {@link KnowledgeBase}, the answers can have a different {@link Correctness} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index 72e7d654a..dbedc4f97 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -39,12 +39,20 @@ final public class LocalPrefixDeclarationRegistry extends AbstractPrefixDeclarat */ private String fallbackIri; + /** + * Construct a Prefix declaration registry without an inherited base IRI. In + * this case, we default to {@value PrefixDeclarationRegistry#EMPTY_BASE}. + */ public LocalPrefixDeclarationRegistry() { this(PrefixDeclarationRegistry.EMPTY_BASE); // empty string encodes: "no base" (use relative IRIs) } /** + * Construct a Prefix declaration registry with a base IRI inherited from the + * importing file. * + * @param fallbackIri the IRI to use as a base if none is set by the imported + * file itself (i.e., if {@link #setBaseIri} is not called). 
*/ public LocalPrefixDeclarationRegistry(String fallbackIri) { super(); From 79e430e200a869342356c7f614339c4115f04073 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 16:35:12 +0100 Subject: [PATCH 0823/1255] README: Fix reference to maven central --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 14a23a5e3..f950b8107 100644 --- a/README.md +++ b/README.md @@ -9,12 +9,12 @@ A Java library based on the [VLog rule engine](https://github.com/karmaresearch/ Installation ------------ -The current release of Rulewerk is version 0.5.0. The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: +The current release of Rulewerk is version 0.5.0 and was released as *vlog4j* (all future releases will be published as *rulewerk*). The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file: ``` - org.semanticweb.rulewerk - rulewerk-core + org.semanticweb.vlog4j + vlog4j-core 0.5.0 ``` From 8a4b1f79ae0de3ed9b24b78eae352e3122e6cd71 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 16:52:02 +0100 Subject: [PATCH 0824/1255] Core: Change back to vlog-base --- rulewerk-core/pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml index 991c14904..35b7e5ea4 100644 --- a/rulewerk-core/pom.xml +++ b/rulewerk-core/pom.xml @@ -24,7 +24,7 @@ ${project.groupId} - rulewerk-base + vlog-base ${karmaresearch.vlog.version} @@ -50,7 +50,7 @@ ${project.groupId} - rulewerk-base + vlog-base ${karmaresearch.vlog.version} jar ./lib/jvlog-local.jar From 367cf52f35b0ac9b1c0b69ec0e3bc12cb52a1ce1 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:36:17 +0100 Subject: [PATCH 0825/1255] handle named nulls in vlog conversion --- 
.../reasoner/implementation/ModelToVLogConverter.java | 10 +++++++++- .../reasoner/implementation/TermToVLogConverter.java | 10 ++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index c146179c9..57db01a7d 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -27,6 +27,7 @@ import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Rule; import org.semanticweb.rulewerk.core.model.api.Term; @@ -80,7 +81,14 @@ static String[] toVLogFactTuple(final Fact fact) { int i = 0; for (final Term term : terms) { // No checks for type of term -- only constants allowed in facts! - vLogFactTuple[i] = TermToVLogConverter.getVLogNameForConstant((Constant)term); + if (term instanceof Constant) { + vLogFactTuple[i] = TermToVLogConverter.getVLogNameForConstant((Constant) term); + } else if (term instanceof NamedNull) { + vLogFactTuple[i] = TermToVLogConverter.getVLogNameForNamedNull((NamedNull) term); + } else { + throw new RuntimeException("Terms in facts must be constants of named nulls. 
Encountered " + term + + " of type " + term.getType() + "."); + } i++; } return vLogFactTuple; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 5fe7d568d..340e5ebd3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -85,6 +85,16 @@ public static String getVLogNameForConstant(Constant constant) { return constant.getName(); } } + + /** + * Converts the given named null to the name of a constant in VLog. + * + * @param named nul + * @return VLog constant string + */ + public static String getVLogNameForNamedNull(NamedNull namedNull) { + return "skolem__" + namedNull.getName(); + } /** * Converts the string representation of a constant in Rulewerk directly to the From d46c5497c295b2b317399589667e49553507200c Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:37:23 +0100 Subject: [PATCH 0826/1255] typo --- .../core/reasoner/implementation/TermToVLogConverter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 340e5ebd3..5b50cb606 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -89,7 +89,7 @@ public static String getVLogNameForConstant(Constant constant) { /** * Converts the given named null to the name of a constant in VLog. 
* - * @param named nul + * @param named null * @return VLog constant string */ public static String getVLogNameForNamedNull(NamedNull namedNull) { From 665f0a194be5f6f88f241f73bd7866d25294f68f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:44:39 +0100 Subject: [PATCH 0827/1255] Clarified documentation --- .../core/reasoner/implementation/TermToVLogConverter.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index 5b50cb606..c143490c5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -85,9 +85,9 @@ public static String getVLogNameForConstant(Constant constant) { return constant.getName(); } } - + /** - * Converts the given named null to the name of a constant in VLog. + * Converts the given named null to the name of a (skolem) constant in VLog. * * @param named null * @return VLog constant string @@ -132,8 +132,8 @@ public karmaresearch.vlog.Term visit(ExistentialVariable term) { } /** - * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same name - * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. + * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same + * name and type {@link karmaresearch.vlog.Term.TermType#BLANK}. 
*/ @Override public karmaresearch.vlog.Term visit(NamedNull term) { From 73a5ab61149836ad0ae9983571da8159de564235 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:46:51 +0100 Subject: [PATCH 0828/1255] Test the skolemisation of blanks in facts --- .../implementation/ModelToVLogConverterTest.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index 8ff7491e2..ee030eac3 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -117,6 +117,15 @@ public void testToVLogTermBlank() { assertEquals(expectedVLogTerm, vLogTerm); } + @Test + public void testToVLogTermBlankSkolemization() { + final NamedNull blank = new NamedNullImpl("blank"); + + final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); + + assertEquals("skolem__blank", vLogSkolemConstant); + } + @Test public void testToVLogTermArray() { final Variable vx = Expressions.makeUniversalVariable("x"); From 2734cd079cc575a8f9e4116bb3a679960b6cc785 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 17:49:03 +0100 Subject: [PATCH 0829/1255] added fixme on potential issues with this solution --- .../core/reasoner/implementation/TermToVLogConverter.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index c143490c5..eb1b6255f 100644 --- 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -89,6 +89,9 @@ public static String getVLogNameForConstant(Constant constant) { /** * Converts the given named null to the name of a (skolem) constant in VLog. * + * @fixme This skolemisation approach might lead to constants that clash with + * existing constant names. + * * @param named null * @return VLog constant string */ From 329b249f9a5dc0fb50aeff3bf635ce8c59a17a5f Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 21:59:11 +0100 Subject: [PATCH 0830/1255] Turn unlikely exception into RuntimeEx --- .../reasoner/implementation/Skolemization.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java index aaa9d1f2f..d11d26849 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.UUID; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.NamedNull; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; @@ -47,15 +48,17 @@ public class Skolemization { * a {@link RenamedNamedNull} instance with the same name when * called on the same instance. * - * @throws IOException when ByteArrayOutputStream throws. * @return a {@link RenamedNamedNull} instance with a new name * that is specific to this instance and {@code name}. 
*/ - public RenamedNamedNull skolemizeNamedNull(String name) throws IOException { + public RenamedNamedNull skolemizeNamedNull(String name) { ByteArrayOutputStream stream = new ByteArrayOutputStream(); - stream.write(namedNullNamespace); - stream.write(name.getBytes()); - - return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + try { + stream.write(namedNullNamespace); + stream.write(name.getBytes()); + return new RenamedNamedNull(UUID.nameUUIDFromBytes(stream.toByteArray())); + } catch (IOException e) { + throw new RulewerkRuntimeException(e.getMessage(), e); + } } } From f50620ada099da6bde48094c45f1118adcc066dc Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 21:59:38 +0100 Subject: [PATCH 0831/1255] Throw Rulewerk exception --- .../core/reasoner/implementation/ModelToVLogConverter.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java index 57db01a7d..ae7f59597 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverter.java @@ -23,6 +23,7 @@ import java.util.Collection; import java.util.List; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Conjunction; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; @@ -86,8 +87,8 @@ static String[] toVLogFactTuple(final Fact fact) { } else if (term instanceof NamedNull) { vLogFactTuple[i] = TermToVLogConverter.getVLogNameForNamedNull((NamedNull) term); } else { - throw new RuntimeException("Terms in facts must be constants of named nulls. 
Encountered " + term - + " of type " + term.getType() + "."); + throw new RulewerkRuntimeException("Terms in facts must be constants or named nulls. Encountered " + + term + " of type " + term.getType() + "."); } i++; } From bd76d8032ba7e2e4aa171ec8f588be642b989520 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:00:07 +0100 Subject: [PATCH 0832/1255] Use Skolemization; support RenamedNamedNulls --- .../implementation/TermToVLogConverter.java | 12 ++++++++---- .../implementation/ModelToVLogConverterTest.java | 16 +++++++++++++++- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java index eb1b6255f..64bc83db3 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TermToVLogConverter.java @@ -29,6 +29,7 @@ import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; import org.semanticweb.rulewerk.core.model.api.TermVisitor; import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; /** * A visitor that converts {@link Term}s of different types to corresponding @@ -39,6 +40,8 @@ */ class TermToVLogConverter implements TermVisitor { + static final Skolemization skolemization = new Skolemization(); + /** * Transforms an abstract constant to a {@link karmaresearch.vlog.Term} with the * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. @@ -89,14 +92,15 @@ public static String getVLogNameForConstant(Constant constant) { /** * Converts the given named null to the name of a (skolem) constant in VLog. 
* - * @fixme This skolemisation approach might lead to constants that clash with - * existing constant names. - * * @param named null * @return VLog constant string */ public static String getVLogNameForNamedNull(NamedNull namedNull) { - return "skolem__" + namedNull.getName(); + if (namedNull instanceof RenamedNamedNull) { + return namedNull.getName(); + } else { + return skolemization.skolemizeNamedNull(namedNull.getName()).getName(); + } } /** diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index ee030eac3..9bdfdb02e 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -22,12 +22,14 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.UUID; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.NamedNull; @@ -39,6 +41,7 @@ import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; @@ -123,7 +126,18 @@ public void testToVLogTermBlankSkolemization() { final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); - assertEquals("skolem__blank", 
vLogSkolemConstant); + assertNotEquals("blank", vLogSkolemConstant); + assertEquals(36,vLogSkolemConstant.length()); // length of a UUID + } + + @Test + public void testToVLogTermBlankRenamedSkolemization() { + final UUID uuid = UUID.randomUUID(); + final NamedNull blank = new RenamedNamedNull(uuid); + + final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); + + assertEquals(uuid.toString(), vLogSkolemConstant); } @Test From 1b5d064a2caa9313ba63dc499bb9782dcb7d1cb2 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:02:26 +0100 Subject: [PATCH 0833/1255] Remove unused exception handling --- .../rulewerk/parser/javacc/JavaCCParserBase.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index f61a80218..2ec72e8de 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -162,11 +162,7 @@ Constant createConstant(String lexicalForm, String datatype) throws ParseExcepti } NamedNull createNamedNull(String lexicalForm) throws ParseException { - try { - return this.skolemization.skolemizeNamedNull(lexicalForm); - } catch (IOException e) { - throw makeParseExceptionWithCause("Failed to generate a unique name for named null", e); - } + return this.skolemization.skolemizeNamedNull(lexicalForm); } void addStatement(Statement statement) { From 05099f43f732caa5791771da942cf98ac135cc22 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:02:51 +0100 Subject: [PATCH 0834/1255] -unused import --- .../org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java | 1 - 1 file changed, 1 deletion(-) diff --git 
a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java index 2ec72e8de..3e38aefde 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -20,7 +20,6 @@ * #L% */ -import java.io.IOException; import java.util.HashSet; import java.util.List; From 9cde81bff290ba1ec922db5d6e7517345a09441e Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:25:06 +0100 Subject: [PATCH 0835/1255] Also use Skolemization for bnodes in OWL --- .../owlapi/ClassToRuleBodyConverter.java | 2 +- .../owlapi/ClassToRuleHeadConverter.java | 2 +- .../owlapi/OwlAxiomToRulesConverter.java | 25 ++++++++++++++----- .../owlapi/OwlToRulesConversionHelper.java | 6 ++--- .../rulewerk/owlapi/OwlToRulesConverter.java | 4 +-- 5 files changed, 26 insertions(+), 13 deletions(-) diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java index 11818553f..12ab8434c 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java @@ -117,7 +117,7 @@ public void visit(final OWLObjectAllValuesFrom ce) { @Override public void visit(final OWLObjectHasValue ce) { - final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller()); + final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller(), parent.skolemization); OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, term, this.body); } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java 
b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java index 2857286c6..6e87333d6 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java @@ -112,7 +112,7 @@ public void visit(final OWLObjectAllValuesFrom ce) { @Override public void visit(final OWLObjectHasValue ce) { - final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller()); + final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller(), parent.skolemization); OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, term, this.head); } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java index 2f10b7099..80390026e 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -82,6 +82,7 @@ import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; /** * Class for converting OWL axioms to rules. 
@@ -91,6 +92,8 @@ */ public class OwlAxiomToRulesConverter implements OWLAxiomVisitor { + Skolemization skolemization = new Skolemization(); + static OWLDataFactory owlDataFactory = OWLManager.getOWLDataFactory(); final Set rules = new HashSet<>(); @@ -98,6 +101,15 @@ public class OwlAxiomToRulesConverter implements OWLAxiomVisitor { final Variable frontierVariable = new UniversalVariableImpl("X"); int freshVariableCounter = 0; + /** + * Changes the renaming function for blank node IDs. Blank nodes with the same + * local ID will be represented differently before and after this function is + * called, but will retain a constant interpretation otherwise. + */ + public void startNewBlankNodeContext() { + skolemization = new Skolemization(); + } + /** * Returns a fresh universal variable, which can be used as auxiliary variable * in the current axiom's translation. @@ -166,7 +178,8 @@ Term replaceTerm(Term term, Term oldTerm, Term newTerm) { PositiveLiteralImpl makeTermReplacedLiteral(Literal literal, Term oldTerm, Term newTerm) { if (literal.isNegated()) { - throw new OwlFeatureNotSupportedException("Nonmonotonic negation of literals is not handled in OWL conversion."); + throw new OwlFeatureNotSupportedException( + "Nonmonotonic negation of literals is not handled in OWL conversion."); } return new PositiveLiteralImpl(literal.getPredicate(), literal.getTerms().map(term -> replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); @@ -242,8 +255,8 @@ public void visit(final OWLSubClassOfAxiom axiom) { @Override public void visit(final OWLNegativeObjectPropertyAssertionAxiom axiom) { - final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject()); - final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject()); + final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject(), skolemization); + final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject(), skolemization); 
final Literal atom = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), subject, object); final PositiveLiteral bot = OwlToRulesConversionHelper.getBottom(subject); this.rules.add(Expressions.makeRule(bot, atom)); @@ -346,8 +359,8 @@ public void visit(final OWLObjectPropertyRangeAxiom axiom) { @Override public void visit(final OWLObjectPropertyAssertionAxiom axiom) { - final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject()); - final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject()); + final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject(), skolemization); + final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject(), skolemization); this.facts.add(OwlToRulesConversionHelper.getObjectPropertyFact(axiom.getProperty(), subject, object)); } @@ -409,7 +422,7 @@ public void visit(final OWLClassAssertionAxiom axiom) { void visitClassAssertionAxiom(final OWLIndividual individual, final OWLClassExpression classExpression) { this.startAxiomConversion(); - final Term term = OwlToRulesConversionHelper.getIndividualTerm(individual); + final Term term = OwlToRulesConversionHelper.getIndividualTerm(individual, skolemization); final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(term, this); classExpression.accept(headConverter); this.addRule(headConverter); diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java index 43c4c1acb..802161334 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -40,9 +40,9 @@ import org.semanticweb.rulewerk.core.model.api.Term; import 
org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; import org.semanticweb.rulewerk.core.model.implementation.FactImpl; -import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; import org.semanticweb.rulewerk.owlapi.AbstractClassToRuleConverter.SimpleConjunction; /** @@ -60,11 +60,11 @@ public class OwlToRulesConversionHelper { * @param owlIndividual the individual to get a term for * @return a suitable term */ - public static Term getIndividualTerm(final OWLIndividual owlIndividual) { + public static Term getIndividualTerm(final OWLIndividual owlIndividual, Skolemization skolemization) { if (owlIndividual instanceof OWLNamedIndividual) { return new AbstractConstantImpl(((OWLNamedIndividual) owlIndividual).getIRI().toString()); } else if (owlIndividual instanceof OWLAnonymousIndividual) { - return new NamedNullImpl(((OWLAnonymousIndividual) owlIndividual).getID().toString()); + return skolemization.skolemizeNamedNull(((OWLAnonymousIndividual) owlIndividual).getID().toString()); } else { throw new OwlFeatureNotSupportedException( "Could not convert OWL individual '" + owlIndividual.toString() + "' to a term."); diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java index 4273bae87..f13f724fe 100644 --- a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java @@ -40,10 +40,10 @@ public class OwlToRulesConverter { * Converts the given OWL ontology to rules and facts, and adds the result to * the internal buffer of rules and facts for later retrieval. 
* - * @param owlOntology - * the ontology + * @param owlOntology the ontology */ public void addOntology(final OWLOntology owlOntology) { + this.owlAxiomToRulesConverter.startNewBlankNodeContext(); owlOntology.axioms().forEach(owlAxiom -> owlAxiom.accept(this.owlAxiomToRulesConverter)); } From 14e29a3625d507a8e290fa9259acaf2a4ba77b4d Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:27:28 +0100 Subject: [PATCH 0836/1255] Remove unused imports --- .../implementation/VLogReasonerWriteInferencesTest.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java index 24e747cdb..99c6a68f4 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerWriteInferencesTest.java @@ -9,14 +9,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.Map.Entry; import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.Before; import org.junit.Test; -import org.mockito.internal.util.collections.Sets; import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; import org.semanticweb.rulewerk.core.model.api.AbstractConstant; import org.semanticweb.rulewerk.core.model.api.Conjunction; From 7915c71ae2329359fdd1caa4f7d49d4d25ba0327 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 22:51:30 +0100 Subject: [PATCH 0837/1255] Test new cases --- .../ModelToVLogConverterTest.java | 42 ++++++++++++++++++- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java index 9bdfdb02e..b88e5e3ef 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/ModelToVLogConverterTest.java @@ -33,14 +33,17 @@ import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; import org.semanticweb.rulewerk.core.model.api.Constant; import org.semanticweb.rulewerk.core.model.api.Fact; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; import org.semanticweb.rulewerk.core.model.api.Term; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; import org.semanticweb.rulewerk.core.model.implementation.Expressions; import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; @@ -127,9 +130,9 @@ public void testToVLogTermBlankSkolemization() { final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank); assertNotEquals("blank", vLogSkolemConstant); - assertEquals(36,vLogSkolemConstant.length()); // length of a UUID + assertEquals(36, vLogSkolemConstant.length()); // length of a UUID } - + @Test public void testToVLogTermBlankRenamedSkolemization() { final UUID uuid = UUID.randomUUID(); @@ -187,6 +190,41 @@ public void 
testToVLogFactTuples() { assertArrayEquals(expectedTuples, vLogTuples); } + @Test + public void testToVLogFactTupleNulls() { + final UUID uuid = UUID.randomUUID(); + final NamedNull n = new RenamedNamedNull(uuid); + final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(n)); + + final String[] expectedTuple = { uuid.toString() }; + + final String[] actualTuple = ModelToVLogConverter.toVLogFactTuple(atom1); + + assertArrayEquals(expectedTuple, actualTuple); + } + + @Test(expected = RulewerkRuntimeException.class) + public void testToVLogFactTupleUnsupported() { + // We need a fact that accepts exception-causing terms in the first place: + class NonValidatingFact extends PositiveLiteralImpl implements Fact { + + public NonValidatingFact(Predicate predicate, List terms) { + super(predicate, terms); + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + } + + final Variable x = Expressions.makeUniversalVariable("X"); + final Fact atom1 = new NonValidatingFact(Expressions.makePredicate("p1", 1), Arrays.asList(x)); + + ModelToVLogConverter.toVLogFactTuple(atom1); + } + @Test public void testToVLogPredicate() { final Predicate predicate = Expressions.makePredicate("pred", 1); From 3a115debdce2d29fd14b9584267001621897c082 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 23:23:01 +0100 Subject: [PATCH 0838/1255] Test bnode diversification across ontologies --- .../owlapi/OwlAxiomToRulesConverterTest.java | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java index d561b779c..1024bcab3 100644 --- a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java +++ 
b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java @@ -634,16 +634,16 @@ public void testObjectPropertyRange() { assertEquals(Collections.singleton(rule), converter.rules); } - + /* * A \sqsubseteq <1 .R */ @Test(expected = OwlFeatureNotSupportedException.class) public void testSubClassOfMaxCardinality() { - + OWLClassExpression maxCard = df.getOWLObjectMaxCardinality(1, pR); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, maxCard ); - + OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, maxCard); + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); } @@ -695,7 +695,7 @@ public void testNominalsInConjunctionLeftSubClassOfClass() { final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); axiom.accept(converter); } - + /* * (B \sqcap {a,b}) \sqsubseteq A */ @@ -710,9 +710,8 @@ public void testNominalsInConjunctionRightSubClassOfClass() { axiom.accept(converter); } - /* - * A \sqsubseteq (B \sqcap {a,b}) + * A \sqsubseteq (B \sqcap {a,b}) */ @Test(expected = OwlFeatureNotSupportedException.class) public void testClassSubClassOfNominalsInConjunctionRight() { @@ -724,7 +723,6 @@ public void testClassSubClassOfNominalsInConjunctionRight() { axiom.accept(converter); } - /* * A \sqsubseteq {a} */ From 37c4dfbc36f10d260232d60e1bc6f98a22ef10b8 Mon Sep 17 00:00:00 2001 From: mkroetzsch Date: Mon, 2 Mar 2020 23:23:19 +0100 Subject: [PATCH 0839/1255] Test bnode diversification across ontologies --- .../owlapi/OwlToRulesConverterTest.java | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java new file mode 100644 index 000000000..88281145e --- 
/dev/null +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java @@ -0,0 +1,48 @@ +package org.semanticweb.rulewerk.owlapi; + +import static org.junit.Assert.*; + +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnonymousIndividual; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; + +public class OwlToRulesConverterTest { + + static OWLDataFactory df = OWLManager.getOWLDataFactory(); + + public static IRI getIri(final String localName) { + return IRI.create("http://example.org/" + localName); + } + + public static OWLClass getOwlClass(final String localName) { + return df.getOWLClass(getIri(localName)); + } + + static final OWLClass cC = getOwlClass("C"); + static final OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); + + @Test + public void testLoadOntologies() throws OWLOntologyCreationException { + final OWLAnonymousIndividual bnode = df.getOWLAnonymousIndividual("abc"); + final OWLAxiom Cn = df.getOWLClassAssertionAxiom(cC, bnode); + final OWLAxiom Ca = df.getOWLClassAssertionAxiom(cC, inda); + + final OWLOntology ontology = OWLManager.createOWLOntologyManager().createOntology(Arrays.asList(Cn,Ca)); + + final OwlToRulesConverter converter = new OwlToRulesConverter(); + converter.addOntology(ontology); + converter.addOntology(ontology); + + assertEquals(3, converter.getFacts().size()); + } + +} From 83dde1b2eb913aec6f66d01a533ed100fc851b06 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Tue, 3 Mar 2020 17:44:22 +0100 Subject: [PATCH 0840/1255] Core: Fix merging of prefix declarations with an empty base --- 
.../AbstractPrefixDeclarationRegistry.java | 10 ++++++--- .../MergingPrefixDeclarationRegistry.java | 19 ++++++++++++---- .../MergingPrefixDeclarationRegistryTest.java | 22 +++++++++++++++++++ .../owlapi/OwlToRulesConverterTest.java | 20 +++++++++++++++++ .../LocalPrefixDeclarationRegistry.java | 1 - 5 files changed, 64 insertions(+), 8 deletions(-) diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java index 3e9127f74..48afd6a1e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -44,10 +44,14 @@ public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclara /** * Iri holding the base namespace. 
*/ - protected String baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + protected String baseUri = null; @Override public String getBaseIri() { + if (baseUri == null) { + baseUri = PrefixDeclarationRegistry.EMPTY_BASE; + } + return baseUri; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java index 8b6ebe16b..94570bd0a 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -65,11 +65,21 @@ public MergingPrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDe */ @Override public void setBaseIri(String baseIri) { - if (baseIri != this.baseUri && this.baseUri != PrefixDeclarationRegistry.EMPTY_BASE) { - prefixes.put(getFreshPrefix(), this.baseUri); + if (baseIri == this.baseUri) { + return; } - this.baseUri = baseIri; + if (this.baseUri == null) { + this.baseUri = baseIri; + } else if (this.baseUri == PrefixDeclarationRegistry.EMPTY_BASE) { + // we need to keep the empty base, so that we don't + // accidentally relativise absolute Iris to + // baseIri. Hence, introduce baseIri as a fresh prefix. 
+ prefixes.put(getFreshPrefix(), baseIri); + } else { + prefixes.put(getFreshPrefix(), this.baseUri); + this.baseUri = baseIri; + } } /** @@ -96,8 +106,9 @@ public void setPrefixIri(String prefixName, String prefixIri) { */ public String unresolveAbsoluteIri(String iri) { Map matches = new HashMap<>(); + String baseIri = getBaseIri(); - if (baseUri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseUri) && !iri.equals(baseUri)) { + if (baseIri != PrefixDeclarationRegistry.EMPTY_BASE && iri.startsWith(baseIri) && !iri.equals(baseIri)) { matches.put(iri.replaceFirst(baseUri, PrefixDeclarationRegistry.EMPTY_BASE), baseUri.length()); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java index 131d5a712..9f06ee6d2 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/MergingPrefixDeclarationRegistryTest.java @@ -21,6 +21,7 @@ */ import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import org.junit.Before; import org.junit.Test; @@ -185,4 +186,25 @@ public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws Pref String resolved = prefixDeclarations.resolvePrefixedName(unresolved); assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved)); } + + @Test + public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative() throws PrefixDeclarationException { + String relativeIri = this.prefixDeclarations.absolutizeIri(RELATIVE); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(BASE); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + assertEquals(relativeIri, 
this.prefixDeclarations.unresolveAbsoluteIri(relativeIri)); + } + + @Test + public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() throws PrefixDeclarationException { + assertEquals("", this.prefixDeclarations.getBaseIri()); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(BASE); + String absoluteIri = prefixDeclarations.absolutizeIri(RELATIVE); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri); + assertNotEquals(RELATIVE, resolvedIri); + assertEquals("rulewerk_generated_0:" + RELATIVE, resolvedIri); + } } diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java index 88281145e..7b6fd9533 100644 --- a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java @@ -1,5 +1,25 @@ package org.semanticweb.rulewerk.owlapi; +/*- + * #%L + * Rulewerk OWL API Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + import static org.junit.Assert.*; import java.util.Arrays; diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java index dbedc4f97..a72def47d 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -57,7 +57,6 @@ public LocalPrefixDeclarationRegistry() { public LocalPrefixDeclarationRegistry(String fallbackIri) { super(); this.fallbackIri = fallbackIri; - this.baseUri = null; } /** From 6015e752bb5eef4c9473d9c5345f4605da9ce088 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 18:24:39 +0100 Subject: [PATCH 0841/1255] Core: Make FileDataSource serialization OS-independent Fixes #140. --- RELEASE-NOTES.md | 2 + .../core/model/implementation/Serializer.java | 2 +- .../implementation/CsvFileDataSource.java | 11 ++--- .../implementation/FileDataSource.java | 46 +++++++++++++------ .../implementation/RdfFileDataSource.java | 8 ++-- .../core/model/DataSourceDeclarationTest.java | 21 ++++----- .../implementation/AddDataSourceTest.java | 24 +++++----- .../implementation/CsvFileDataSourceTest.java | 26 +++++------ .../FileDataSourceTestUtils.java | 8 ++-- .../implementation/RdfFileDataSourceTest.java | 14 +++--- .../VLogReasonerCombinedInputs.java | 12 ++--- .../implementation/VLogReasonerCsvInput.java | 22 ++++----- .../implementation/VLogReasonerRdfInput.java | 18 ++++---- .../implementation/VLogReasonerStateTest.java | 7 ++- .../examples/graal/DoidExampleGraal.java | 7 ++- .../CsvFileDataSourceDeclarationHandler.java | 11 ++--- .../RdfFileDataSourceDeclarationHandler.java | 11 ++--- .../parser/RuleParserDataSourceTest.java | 15 +++--- 18 files changed, 134 insertions(+), 131 deletions(-) diff --git a/RELEASE-NOTES.md 
b/RELEASE-NOTES.md index 5d2119244..159b43158 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -9,6 +9,8 @@ Breaking changes: * In the example package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` does no longer exist. It can be replaced by `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()` +* The `FileDataSource` constructor and those of derived classes now + take the path to a file instead of `File` object. New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index 0a2668cd9..d83045815 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -380,7 +380,7 @@ public static String getString(final SparqlQueryResultDataSource dataSource) { } private static String getFileString(final FileDataSource fileDataSource) { - return getString(fileDataSource.getFile().toString()); + return getString(fileDataSource.getPath().toString()); } private static String getIRIString(final String string) { diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java index 83aff537a..7b7812b4c 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.Arrays; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.implementation.Serializer; /** @@ -57,15 +56,15 @@ public class CsvFileDataSource extends FileDataSource { /** * Constructor. * - * @param csvFile a file of a {@code .csv} or {@code .csv.gz} extension and a - * valid CSV format. + * @param csvFile path to a file of a {@code .csv} or {@code .csv.gz} extension + * and a valid CSV format. * @throws IOException if the path of the given {@code csvFile} is * invalid. * @throws IllegalArgumentException if the extension of the given * {@code csvFile} does not occur in * {@link #possibleExtensions}. */ - public CsvFileDataSource(@NonNull final File csvFile) throws IOException { + public CsvFileDataSource(final String csvFile) throws IOException { super(csvFile, possibleExtensions); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java index 4d79ae3a2..d65bc7af1 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,7 +28,6 @@ import java.util.stream.StreamSupport; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; /** * A {@code FileDataSource} is an abstract implementation of a storage for fact @@ -44,6 +43,8 @@ public abstract class FileDataSource extends VLogDataSource { private final static String DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; private final File file; + private final String filePath; + private final String fileName; private final String extension; /** * The canonical path to the parent directory where the file resides. @@ -54,7 +55,8 @@ public abstract class FileDataSource extends VLogDataSource { /** * Constructor. * - * @param file a file that will serve as storage for fact terms. + * @param filePath path to a file that will serve as storage for fact + * terms. * @param possibleExtensions a list of extensions that the files could have * @throws IOException if the path of the given {@code file} is * invalid. @@ -62,24 +64,24 @@ public abstract class FileDataSource extends VLogDataSource { * does not occur in * {@code possibleExtensions}. 
*/ - public FileDataSource(@NonNull final File file, final Iterable possibleExtensions) throws IOException { - Validate.notNull(file, "Data source file cannot be null!"); - final String fileName = file.getName(); - - this.file = file; + public FileDataSource(final String filePath, final Iterable possibleExtensions) throws IOException { + Validate.notBlank(filePath, "Data source file name cannot be null!"); + + this.file = new File(filePath); + this.filePath = filePath.replaceAll("\\\\", "/"); // canonicalise windows-style path separators + this.fileName = this.filePath.substring(this.filePath.lastIndexOf("/") + 1); // just the file name + this.extension = getValidExtension(this.fileName, possibleExtensions); + this.fileNameWithoutExtension = this.fileName.substring(0, this.fileName.lastIndexOf(this.extension)); this.dirCanonicalPath = Paths.get(file.getCanonicalPath()).getParent().toString(); - this.extension = getValidExtension(file, possibleExtensions); - this.fileNameWithoutExtension = fileName.substring(0, fileName.lastIndexOf(this.extension)); } - private String getValidExtension(final File file, final Iterable possibleExtensions) { - final String fileName = file.getName(); + private String getValidExtension(final String fileName, final Iterable possibleExtensions) { final Stream extensionsStream = StreamSupport.stream(possibleExtensions.spliterator(), true); - final Optional potentialExtension = extensionsStream.filter(ex -> fileName.endsWith(ex)).findFirst(); + final Optional potentialExtension = extensionsStream.filter(fileName::endsWith).findFirst(); if (!potentialExtension.isPresent()) { throw new IllegalArgumentException("Expected one of the following extensions for the data source file " - + file + ": " + String.join(", ", possibleExtensions) + "."); + + fileName + ": " + String.join(", ", possibleExtensions) + "."); } return potentialExtension.get(); @@ -104,7 +106,16 @@ public File getFile() { return this.file; } + public String getPath() { + return 
this.filePath; + } + + public String getName() { + return this.fileName; + } + /** + * Canonicalise the file path * * @return The canonical path to the parent directory where the file resides. */ @@ -112,6 +123,11 @@ String getDirCanonicalPath() { return this.dirCanonicalPath; } + /** + * Get the base name of the file, without an extension. + * + * @return the file basename without any extension. + */ String getFileNameWithoutExtension() { return this.fileNameWithoutExtension; } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java index c1274aac2..e56148544 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -55,7 +55,7 @@ public class RdfFileDataSource extends FileDataSource { /** * Constructor. * - * @param rdfFile a file of a {@code .nt} or {@code .nt.gz} extension and a + * @param rdfFile path to a file of a {@code .nt} or {@code .nt.gz} extension and a * valid N-Triples format. * @throws IOException if the path of the given {@code rdfFile} is * invalid. @@ -63,7 +63,7 @@ public class RdfFileDataSource extends FileDataSource { * {@code rdfFile} does not occur in * {@link #possibleExtensions}. 
*/ - public RdfFileDataSource(final File rdfFile) throws IOException { + public RdfFileDataSource(final String rdfFile) throws IOException { super(rdfFile, possibleExtensions); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java index 32ef82333..7c5ad3cba 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/DataSourceDeclarationTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -28,7 +28,6 @@ import java.net.MalformedURLException; import java.net.URL; -import org.junit.Ignore; import org.junit.Test; import org.semanticweb.rulewerk.core.model.api.DataSource; import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; @@ -84,25 +83,23 @@ public void toString_SparqlQueryResultDataSource() throws IOException { @Test public void toString_CsvFileDataSource() throws IOException { final Predicate predicate2 = Expressions.makePredicate("q", 1); - final String relativeDirName = "dir"; + final String relativeDirName = "dir/"; final String fileName = "file.csv"; - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(relativeDirName, fileName)); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(relativeDirName + fileName); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2, unzippedCsvFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + final String expectedFilePath = Serializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } - // FIXME: have String representation of files OS independent - @Ignore @Test public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { final Predicate predicate = Expressions.makePredicate("q", 1); final String absoluteFilePathWindows = "D:\\input\\file.csv"; - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(absoluteFilePathWindows)); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(absoluteFilePathWindows); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedCsvFileDataSource); assertEquals("@source q[1]: load-csv(\"D:/input/file.csv\") .", dataSourceDeclaration.toString()); @@ -111,14 +108,14 @@ public void 
toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throw @Test public void toString_RdfFileDataSource_relativePath() throws IOException { final Predicate predicate = Expressions.makePredicate("q", 1); - final String relativeDirName = "dir"; + final String relativeDirName = "dir/"; final String fileName = "file.nt"; - final File unzippedRdfFile = new File(relativeDirName, fileName); + final String unzippedRdfFile = relativeDirName + fileName; final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, unzippedRdfFileDataSource); - final String expectedFilePath = Serializer.getString(relativeDirName + File.separator + fileName); + final String expectedFilePath = Serializer.getString(relativeDirName + fileName); assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); } } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java index 87d84362f..ce8ae45ef 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/AddDataSourceTest.java @@ -12,9 +12,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -64,7 +64,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); final Fact factPredicateQArity1 = Expressions.makeFact("q", Arrays.asList(constantA)); final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(factPredicatePArity2); @@ -93,7 +93,7 @@ public void testAddDataSourceExistentDataForDifferentPredicates() throws IOExcep public void testAddDataSourceBeforeLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); @@ -119,7 +119,7 @@ public void testAddDataSourceBeforeLoading() throws IOException { public void testAddDataSourceAfterLoading() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); @@ -150,7 +150,7 @@ public void testAddDataSourceAfterLoading() throws IOException { public void testAddDataSourceAfterReasoning() throws IOException { final Predicate predicateP = Expressions.makePredicate("p", 1); final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new 
CsvFileDataSource(CSV_FILE_c1_c2_PATH); final KnowledgeBase kb = new KnowledgeBase(); @@ -181,8 +181,8 @@ public void testAddDataSourceAfterReasoning() throws IOException { @Test public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); - final DataSource dataSource2 = new CsvFileDataSource(new File(CSV_FILE_c_d_PATH)); + final DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); + final DataSource dataSource2 = new CsvFileDataSource(CSV_FILE_c_d_PATH); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1)); @@ -202,7 +202,7 @@ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOExcept @Test public void testAddDataSourceNoFactsForPredicate() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), Arrays.asList(Expressions.makeAbstractConstant("a"))); @@ -222,9 +222,9 @@ public void testAddDataSourceNoFactsForPredicate() throws IOException { @Test public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource1 = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final DataSource dataSource2 = new CsvFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv")); + FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new 
DataSourceDeclarationImpl(predicate, dataSource1)); @@ -247,7 +247,7 @@ public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOExce @Test public void testAddDataSourceAndFactsForPredicateAfterReasoning() throws IOException { final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_c1_c2_PATH)); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1), Arrays.asList(Expressions.makeAbstractConstant("a"))); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java index f9b840f1e..e94173d20 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -41,25 +41,23 @@ public void testConstructorNullFile() throws IOException { @Test(expected = IllegalArgumentException.class) public void testConstructorFalseExtension() throws IOException { - new CsvFileDataSource(new File(ntFile)); + new CsvFileDataSource(ntFile); } @Test public void testConstructor() throws IOException { - final File unzippedCsvFile = new File(csvFile); - final File zippedCsvFile = new File(gzFile); final String dirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(unzippedCsvFile); - final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(zippedCsvFile); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); + final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); - FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, unzippedCsvFile, dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, zippedCsvFile, dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, new File(csvFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName(), dirCanonicalPath, "file"); } @Test public void testToConfigString() throws IOException { - final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(csvFile)); - final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(new File(gzFile)); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); + final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath(); final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0=" @@ -71,8 +69,7 
@@ public void testToConfigString() throws IOException { @Test public void testNoParentDir() throws IOException { - final File file = new File("file.csv"); - final FileDataSource fileDataSource = new CsvFileDataSource(file); + final FileDataSource fileDataSource = new CsvFileDataSource("file.csv"); final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); final String currentFolder = new File(".").getCanonicalPath(); assertEquals(currentFolder, dirCanonicalPath); @@ -80,8 +77,7 @@ public void testNoParentDir() throws IOException { @Test public void testNotNormalisedParentDir() throws IOException { - final File file = new File("./././file.csv"); - final FileDataSource fileDataSource = new CsvFileDataSource(file); + final FileDataSource fileDataSource = new CsvFileDataSource("./././file.csv"); final String dirCanonicalPath = fileDataSource.getDirCanonicalPath(); final String currentFolder = new File(".").getCanonicalPath(); assertEquals(currentFolder, dirCanonicalPath); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java index 74a5215fe..ea714f865 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -14,9 +14,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -92,9 +92,9 @@ public static List> getCSVContent(final String csvFile) throws IOEx return content; } - public static void testConstructor(final FileDataSource fileDataSource, final File expectedFile, + public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName, final String expectedDirCanonicalPath, final String expectedFileNameWithoutExtension) throws IOException { - assertEquals(expectedFile, fileDataSource.getFile()); + assertEquals(expectedFileName, fileDataSource.getName()); assertEquals(expectedDirCanonicalPath, fileDataSource.getDirCanonicalPath()); assertEquals(expectedFileNameWithoutExtension, fileDataSource.getFileNameWithoutExtension()); } diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java index c5baf8bde..70e8b4657 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java @@ -11,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -30,8 +30,8 @@ public class RdfFileDataSourceTest { - private final File unzippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"); - private final File zippedRdfFile = new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz"); + private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; + private final String zippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz"; @Test(expected = NullPointerException.class) public void testConstructorNullFile() throws IOException { @@ -40,7 +40,7 @@ public void testConstructorNullFile() throws IOException { @Test(expected = IllegalArgumentException.class) public void testConstructorFalseExtension() throws IOException { - new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "file.csv")); + new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"); } @Test @@ -50,8 +50,8 @@ public void testConstructor() throws IOException { final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); - FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, unzippedRdfFile, dirCanonicalPath, "file"); - FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, zippedRdfFile, dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName(), dirCanonicalPath, "file"); + FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName(), dirCanonicalPath, "file"); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java index 6c305e40d..b3f2fba74 100644 --- 
a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCombinedInputs.java @@ -2,8 +2,6 @@ import static org.junit.Assert.assertEquals; -import java.io.File; - /*- * #%L * Rulewerk Core Components @@ -13,9 +11,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -69,10 +67,10 @@ public class VLogReasonerCombinedInputs { final DataSourceDeclaration qCDFromCsv; public VLogReasonerCombinedInputs() throws IOException { - qFromCsv = new DataSourceDeclarationImpl(q, new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); + qFromCsv = new DataSourceDeclarationImpl(q, new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv")); qCDFromCsv = new DataSourceDeclarationImpl(q, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv")); } @Test diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java index bbdc37316..3ec10b94f 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java +++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerCsvInput.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -61,17 +61,17 @@ public void testLoadEmptyCsvFile() throws IOException { final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, this.x); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv"))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv")); FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv.gz"))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv.gz")); } @Test public void testLoadUnaryFactsFromCsvFile() throws IOException { - testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"))); - testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz"))); + testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv")); + testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz")); } 
private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws IOException { @@ -107,7 +107,7 @@ private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource file public void testLoadNonexistingCsvFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.csv"); assertFalse(nonexistingFile.exists()); - final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile); + final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile.getName()); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); @@ -118,8 +118,8 @@ public void testLoadNonexistingCsvFile() throws IOException { @Test(expected = IncompatiblePredicateArityException.class) public void testLoadCsvFileWrongArity() throws IOException { - final FileDataSource fileDataSource = new CsvFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv")); + final FileDataSource fileDataSource = new CsvFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource)); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java index 0d76dc569..f9b52ad44 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerRdfInput.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -62,7 +62,7 @@ public class VLogReasonerRdfInput { @Test public void testLoadEmptyRdfFile() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, - new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt"))); + new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt")); } @Ignore @@ -71,19 +71,19 @@ public void testLoadEmptyRdfFile() throws IOException { @Test public void testLoadEmptyRdfFileGz() throws IOException { FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom, - new RdfFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz"))); + new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz")); } @Test public void testLoadTernaryFactsFromRdfFile() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt"))); + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt")); } @Test public void testLoadTernaryFactsFromRdfFileGz() throws IOException { testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource( - new File(FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz"))); + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz")); } public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException { @@ -104,7 +104,7 @@ public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fil public void 
testLoadNonexistingRdfFile() throws IOException { final File nonexistingFile = new File("nonexistingFile.nt"); assertFalse(nonexistingFile.exists()); - final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile); + final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile.getName()); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); @@ -115,8 +115,8 @@ public void testLoadNonexistingRdfFile() throws IOException { @Test public void testLoadRdfInvalidFormat() throws IOException { - final FileDataSource fileDataSource = new RdfFileDataSource(new File( - FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt")); + final FileDataSource fileDataSource = new RdfFileDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.invalidFormatNtFileNameRoot + ".nt"); final KnowledgeBase kb = new KnowledgeBase(); kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java index 8f85f806e..91c61c680 100644 --- a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/VLogReasonerStateTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -221,7 +220,7 @@ public void testResetKeepExplicitDatabase() throws IOException { // assert r(d) final Predicate predicateR1 = Expressions.makePredicate("r", 1); kb.addStatement(new DataSourceDeclarationImpl(predicateR1, - new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, "constantD.csv")))); + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "constantD.csv"))); // p(?x) -> q(?x) try (final VLogReasoner reasoner = new VLogReasoner(kb)) { diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java index 6c1e9f19d..971fedb8b 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -67,8 +67,7 @@ public static void main(final String[] args) throws IOException { /* Configure RDF data source */ final Predicate doidTriplePredicate = Expressions.makePredicate("doidTriple", 3); - final DataSource doidDataSource = new RdfFileDataSource( - new File(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz")); + final DataSource doidDataSource = new RdfFileDataSource(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz"); kb.addStatement(new DataSourceDeclarationImpl(doidTriplePredicate, doidDataSource)); /* Configure SPARQL data sources */ @@ -99,8 +98,8 @@ public 
static void main(final String[] args) throws IOException { while (parser.hasNext()) { final Object object = parser.next(); if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { - kb.addStatement( - GraalToRulewerkModelConverter.convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); + kb.addStatement(GraalToRulewerkModelConverter + .convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); } } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java index 338b024e8..7979f154f 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.List; @@ -42,12 +41,12 @@ public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); + String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); try { - return new CsvFileDataSource(file); + return new CsvFileDataSource(fileName); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } } } diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java index b27f52d21..ee7a2ec79 100644 --- a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,7 +20,6 @@ * #L% */ -import java.io.File; import java.io.IOException; import java.util.List; @@ -42,12 +41,12 @@ public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclaratio public DataSource handleDirective(List arguments, final SubParserFactory subParserFactory) throws ParsingException { DirectiveHandler.validateNumberOfArguments(arguments, 1); - File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "source file"); + String fileName = DirectiveHandler.validateStringArgument(arguments.get(0), "source file"); try { - return new RdfFileDataSource(file); + return new RdfFileDataSource(fileName); } catch (IOException e) { - throw new ParsingException("Could not use source file \"" + file.getName() + "\": " + e.getMessage(), e); + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); } } } diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java index 21919dd91..14c2bceb3 100644 --- a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -23,7 +23,6 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; @@ -56,14 +55,14 @@ public class RuleParserDataSourceTest { @Test public void testCsvSource() throws ParsingException, IOException { String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; - CsvFileDataSource csvds = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + CsvFileDataSource csvds = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @Test public void testRdfSource() throws ParsingException, IOException { String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; - RdfFileDataSource rdfds = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); + RdfFileDataSource rdfds = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); } @@ -168,7 +167,7 @@ public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingExcep public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = Expressions.makePredicate("p", 3); - RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(new File(EXAMPLE_RDF_FILE_PATH)); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedRdfFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -179,7 +178,7 @@ public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingExceptio public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { KnowledgeBase kb = new KnowledgeBase(); Predicate predicate1 = 
Expressions.makePredicate("q", 1); - CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, unzippedCsvFileDataSource); RuleParser.parseInto(kb, dataSourceDeclaration.toString()); @@ -201,7 +200,7 @@ public DataSource handleDirective(List arguments, SubParserFa throws ParsingException { CsvFileDataSource source; try { - source = new CsvFileDataSource(new File(EXAMPLE_CSV_FILE_PATH)); + source = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); } catch (IOException e) { throw new ParsingException(e); } From 32c0795af142b68dfd9e59cbdcdb02cdc04e9a00 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 18:33:25 +0100 Subject: [PATCH 0842/1255] Drop dependency on eclipse.jdt.annotations Fixes #79. --- pom.xml | 6 ------ .../model/implementation/AbstractLiteralImpl.java | 7 +++---- .../model/implementation/NegativeLiteralImpl.java | 7 +++---- .../model/implementation/PositiveLiteralImpl.java | 7 +++---- .../core/model/implementation/PredicateImpl.java | 11 +++++------ .../implementation/SparqlQueryResultDataSource.java | 9 ++++----- .../examples/core/ConfigureReasonerLogging.java | 11 +++++------ 7 files changed, 23 insertions(+), 35 deletions(-) diff --git a/pom.xml b/pom.xml index aa884b190..d3e85c1e7 100644 --- a/pom.xml +++ b/pom.xml @@ -66,7 +66,6 @@ UTF-8 - 2.1.100 4.12 2.28.2 1.7.28 @@ -81,11 +80,6 @@ - - org.eclipse.jdt - org.eclipse.jdt.annotation - ${eclipse.jdt.annotation.version} - junit junit diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java index e19864aee..d245da52a 100644 --- 
a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,6 @@ import java.util.stream.Stream; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.Literal; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; @@ -52,7 +51,7 @@ public abstract class AbstractLiteralImpl implements Literal { * @param terms non-empty list of non-null terms. List size must be the same * as the predicate arity. */ - public AbstractLiteralImpl(@NonNull final Predicate predicate, @NonNull final List terms) { + public AbstractLiteralImpl(final Predicate predicate, final List terms) { Validate.notNull(predicate, "Literal predicates cannot be null."); Validate.noNullElements(terms, "Null terms cannot appear in literals. 
The list contains a null at position [%d]."); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java index 554ae0f63..cf7b69212 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,14 +22,13 @@ import java.util.List; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; public class NegativeLiteralImpl extends AbstractLiteralImpl implements NegativeLiteral { - public NegativeLiteralImpl(@NonNull Predicate predicate, @NonNull List terms) { + public NegativeLiteralImpl(final Predicate predicate, final List terms) { super(predicate, terms); } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java index c0aba7096..e95d5cfaa 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java +++ 
b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,14 +22,13 @@ import java.util.List; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; import org.semanticweb.rulewerk.core.model.api.Predicate; import org.semanticweb.rulewerk.core.model.api.Term; public class PositiveLiteralImpl extends AbstractLiteralImpl implements PositiveLiteral { - public PositiveLiteralImpl(@NonNull Predicate predicate, @NonNull List terms) { + public PositiveLiteralImpl(final Predicate predicate, final List terms) { super(predicate, terms); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 6ec346dae..04741fb8e 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,13 +21,12 @@ */ import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.Predicate; /** * Implementation for {@link Predicate}. Supports predicates of arity 1 or * higher. - * + * * @author Irina Dragoste * */ @@ -39,11 +38,11 @@ public class PredicateImpl implements Predicate { /** * Constructor for {@link Predicate}s of arity 1 or higher. - * + * * @param name a non-blank String (not null, nor empty or whitespace). * @param arity an int value strictly greater than 0. */ - public PredicateImpl(@NonNull String name, int arity) { + public PredicateImpl(final String name, int arity) { Validate.notBlank(name, "Predicates cannot be named by blank Strings."); Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java index 2ee6c900b..0015bece5 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,7 +26,6 @@ import java.util.Optional; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; import org.semanticweb.rulewerk.core.model.api.Variable; import org.semanticweb.rulewerk.core.model.implementation.Serializer; @@ -83,8 +82,8 @@ public SparqlQueryResultDataSource(final URL endpoint, final String queryVariabl */ // TODO add examples to javadoc // TODO add illegal argument exceptions to javadoc - public SparqlQueryResultDataSource(@NonNull final URL endpoint, - @NonNull final LinkedHashSet queryVariables, @NonNull final String queryBody) { + public SparqlQueryResultDataSource(final URL endpoint, + final LinkedHashSet queryVariables, final String queryBody) { Validate.notNull(endpoint, "Endpoint cannot be null."); Validate.notNull(queryVariables, "Query variables ordered set cannot be null."); Validate.noNullElements(queryVariables, "Query variables cannot be null or contain null elements."); diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java index b29262a49..81039cf41 100644 --- a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java @@ -9,9 +9,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,7 +22,6 @@ import java.io.IOException; -import org.eclipse.jdt.annotation.Nullable; import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; import org.semanticweb.rulewerk.core.reasoner.LogLevel; import org.semanticweb.rulewerk.core.reasoner.Reasoner; @@ -52,17 +51,17 @@ public class ConfigureReasonerLogging { * Path to the file where the default WARNING level reasoner logs will be * exported. */ - private static @Nullable String reasonerWarningLogFilePath = logsFolder + "ReasonerWarningLogFile.log"; + private static String reasonerWarningLogFilePath = logsFolder + "ReasonerWarningLogFile.log"; /** * Path to the file where INFO level reasoner logs will be exported. */ - private static @Nullable String reasonerInfoLogFilePath = logsFolder + "ReasonerInfoLogFile.log"; + private static String reasonerInfoLogFilePath = logsFolder + "ReasonerInfoLogFile.log"; /** * Path to the file where DEBUG level reasoner logs will be exported. 
*/ - private static @Nullable String reasonerDebugLogFilePath = logsFolder + "ReasonerDebugLogFile.log"; + private static String reasonerDebugLogFilePath = logsFolder + "ReasonerDebugLogFile.log"; public static void main(final String[] args) throws IOException, ParsingException { From 5ae828c05c3f6b1963639383c0e8d3cbd615d956 Mon Sep 17 00:00:00 2001 From: Maximilian Marx Date: Mon, 2 Mar 2020 19:06:23 +0100 Subject: [PATCH 0843/1255] Core: Add serialisation for KnowledgeBase --- RELEASE-NOTES.md | 1 + .../core/model/implementation/Serializer.java | 91 ++++++++++++++----- .../rulewerk/core/reasoner/KnowledgeBase.java | 35 ++++++- .../core/reasoner/KnowledgeBaseTest.java | 14 ++- 4 files changed, 117 insertions(+), 24 deletions(-) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 159b43158..24f38e9e0 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -15,6 +15,7 @@ Breaking changes: New features: * Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()` * All inferred facts can be serialized to a file using `Reasoner.writeInferences()` +* Knowledge bases can be serialized to a file using `KnowlegdeBase.writeKnowledgeBase()` * Rules files may import other rules files using `@import` and `@import-relative`, where the latter resolves relative IRIs using the current base IRI, unless the imported file explicitly specifies diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java index d83045815..ef0c9f766 100644 --- a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java @@ -94,7 +94,9 @@ private Serializer() { /** * Creates a String representation of a given {@link Rule}. * - * @see Rule syntax . + * @see Rule + * syntax . 
* @param rule a {@link Rule}. * @return String representation corresponding to a given {@link Rule}. * @@ -106,7 +108,9 @@ public static String getString(final Rule rule) { /** * Creates a String representation of a given {@link Conjunction}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param conjunction a {@link Conjunction} * @return String representation corresponding to a given {@link Conjunction}. */ @@ -127,7 +131,9 @@ public static String getString(final Conjunction conjunction) /** * Creates a String representation of a given {@link Literal}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param literal a {@link Literal} * @return String representation corresponding to a given {@link Literal}. */ @@ -143,7 +149,9 @@ public static String getString(final Literal literal) { /** * Creates a String representation of a given {@link Fact}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param fact a {@link Fact} * @return String representation corresponding to a given {@link Fact}. */ @@ -154,7 +162,9 @@ public static String getFactString(final Fact fact) { /** * Creates a String representation of a given {@link AbstractConstant}. * - * @see Rule syntax . + * @see Rule + * syntax . * @param constant a {@link AbstractConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -167,7 +177,9 @@ public static String getString(final AbstractConstant constant, FunctionRule syntax . + * @see Rule + * syntax . * @param constant a {@link AbstractConstant} * @return String representation corresponding to a given * {@link AbstractConstant}. @@ -180,7 +192,9 @@ public static String getString(final AbstractConstant constant) { * Creates a String representation corresponding to the name of a given * {@link LanguageStringConstant}. * - * @see Rule syntax . + * @see Rule + * syntax . 
* @param languageStringConstant a {@link LanguageStringConstant} * @return String representation corresponding to the name of a given * {@link LanguageStringConstant}. @@ -202,10 +216,14 @@ public static String getConstantName(final LanguageStringConstant languageString *
      • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
      • *
      • {@code "42"^^xsd:Integer} results in {@code 42},
      • *
      • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
      • - *
      • {@code "test"^^} results in {@code "test"^^}, modulo transformation of the datatype IRI.
      • + *
      • {@code "test"^^} results in + * {@code "test"^^}, modulo transformation of the datatype + * IRI.
      • *
      * - * @see Rule syntax . + * @see Rule + * syntax . * @param datatypeConstant a {@link DatatypeConstant} * @param iriTransformer a function to transform IRIs with. * @return String representation corresponding to a given @@ -236,8 +254,10 @@ public static String getString(final DatatypeConstant datatypeConstant, Function *
    • {@code "23.0"^^xsd:Decimal} results in {@code 23.0},
    • *
    • {@code "42"^^xsd:Integer} results in {@code 42},
    • *
    • {@code "23.42"^^xsd:Double} results in {@code 23.42E0}, and
    • - *
    • {@code "test"^^} results in {@code "test"^^}.
    • + *
    • {@code "test"^^} results in + * {@code "test"^^}.
    • *